Mirror of https://github.com/ClusterCockpit/cc-backend, synced 2024-12-25 12:59:06 +01:00
Adapt svelte to new schema, add removed metric box
- Moved 'scope' field to parent jobMetric
- Implemented unit { prefix, base } where necessary
- SubCluster Metric Config 'remove' option implemented in Joblists
This commit is contained in:
parent 6b84e65d88
commit 0c1b66aad9
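For orientation, a minimal Go sketch of the adapted shape follows (not taken from this commit; the helper function and import paths are assumptions, the field names come from the hunks below): 'scope' now sits on the JobMetricWithName wrapper instead of inside the metric, and units become structured { prefix, base } values. The subcluster-level 'remove' flag is queried with the metric config and handled in the job lists further down.

package sketch

// Sketch only: how the adapted types fit together. The helper function and
// import paths are illustrative assumptions; the field names (Name, Scope,
// Metric, Unit.Prefix, Unit.Base) are taken from the hunks below.
import (
	"github.com/ClusterCockpit/cc-backend/graph/model"
	"github.com/ClusterCockpit/cc-backend/pkg/archive"
	"github.com/ClusterCockpit/cc-backend/pkg/schema"
)

func wrapNodeMetric(job *schema.Job, name string, data *schema.JobMetric, avg, min, max float64) (*model.JobMetricWithName, schema.JobStatistics) {
	wrapped := &model.JobMetricWithName{
		Name:   name,
		Scope:  schema.MetricScopeNode, // 'scope' now lives on the wrapper, not inside the metric
		Metric: data,
	}
	stats := schema.JobStatistics{
		Unit: schema.Unit{ // structured { prefix, base } pair instead of a plain unit string
			Prefix: archive.GetMetricConfig(job.Cluster, name).Unit.Prefix,
			Base:   archive.GetMetricConfig(job.Cluster, name).Unit.Base,
		},
		Avg: avg / float64(job.NumNodes),
		Min: min,
		Max: max,
	}
	return wrapped, stats
}
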
@@ -269,6 +269,7 @@ func (r *queryResolver) NodeMetrics(ctx context.Context, cluster string, nodes [
for _, scopedMetric := range scopedMetrics {
host.Metrics = append(host.Metrics, &model.JobMetricWithName{
Name: metric,
Scope: schema.MetricScopeNode, // NodeMetrics allow fixed scope?
Metric: scopedMetric,
})
}

@@ -282,7 +283,16 @@ func (r *queryResolver) NodeMetrics(ctx context.Context, cluster string, nodes [

// NumberOfNodes is the resolver for the numberOfNodes field.
func (r *subClusterResolver) NumberOfNodes(ctx context.Context, obj *schema.SubCluster) (int, error) {
panic(fmt.Errorf("not implemented: NumberOfNodes - numberOfNodes"))
nodeList, err := archive.ParseNodeList(obj.Nodes)
if err != nil {
return 0, err
}
// log.Debugf(">>>> See raw list definition here: %v", nodeList)
stringList := nodeList.PrintList()
// log.Debugf(">>>> See parsed list here: %v", stringList)
numOfNodes := len(stringList)
// log.Debugf(">>>> See numOfNodes here: %v", len(stringList))
return numOfNodes, nil
}

// Cluster returns generated.ClusterResolver implementation.

@@ -324,7 +324,10 @@ func ArchiveJob(job *schema.Job, ctx context.Context) (*schema.JobMeta, error) {
}

jobMeta.Statistics[metric] = schema.JobStatistics{
Unit: archive.GetMetricConfig(job.Cluster, metric).Unit,
Unit: schema.Unit{
Prefix: archive.GetMetricConfig(job.Cluster, metric).Unit.Prefix,
Base: archive.GetMetricConfig(job.Cluster, metric).Unit.Base,
},
Avg: avg / float64(job.NumNodes),
Min: min,
Max: max,

@@ -14,6 +14,8 @@ import (

type NodeList [][]interface {
consume(input string) (next string, ok bool)
limits() []map[string]int64
prefix() string
}

func (nl *NodeList) Contains(name string) bool {

@@ -35,6 +37,29 @@ func (nl *NodeList) Contains(name string) bool {
return false
}

func (nl *NodeList) PrintList() []string {
var out []string
for _, term := range *nl {
// log.Debugf("Term: %v", term)

prefix := term[0].prefix()
// log.Debugf("Prefix as String: %s", prefix)

limitArr := term[1].limits()
for _, inner := range limitArr {
for i := inner["start"]; i < inner["end"]+1; i++ {
node := fmt.Sprintf("%s%02d", prefix, i)
out = append(out, node)
}
// log.Debugf("Inner Map @ %d: %#v", indx, inner)
// log.Debugf("Start: %#v", inner["start"])
// log.Debugf("End: %#v", inner["end"])
}
}
// log.Debugf("Node List as Strings: %#v", out)
return out
}

type NLExprString string

func (nle NLExprString) consume(input string) (next string, ok bool) {

@@ -45,6 +70,16 @@ func (nle NLExprString) consume(input string) (next string, ok bool) {
return "", false
}

func (nle NLExprString) limits() []map[string]int64 {
// Null implementation to fullfill interface requirement
l := make([]map[string]int64, 0)
return l
}

func (nle NLExprString) prefix() string {
return string(nle)
}

type NLExprIntRanges []NLExprIntRange

func (nles NLExprIntRanges) consume(input string) (next string, ok bool) {

@@ -56,6 +91,22 @@ func (nles NLExprIntRanges) consume(input string) (next string, ok bool) {
return "", false
}

func (nles NLExprIntRanges) limits() []map[string]int64 {
l := make([]map[string]int64, 0)
for _, nle := range nles {
inner := nle.limits()
// log.Debugf("limits @ nles: %#v", inner)
l = append(l, inner[0])
}
return l
}

func (nles NLExprIntRanges) prefix() string {
// Null implementation to fullfill interface requirement
var s string
return s
}

type NLExprIntRange struct {
start, end int64
zeroPadded bool

@@ -89,6 +140,22 @@ func (nle NLExprIntRange) consume(input string) (next string, ok bool) {
return "", false
}

func (nle NLExprIntRange) limits() []map[string]int64 {
l := make([]map[string]int64, 0)
m := make(map[string]int64)
m["start"] = nle.start
m["end"] = nle.end
l = append(l, m)
// log.Debugf("limits @ nle: %#v", l)
return l
}

func (nles NLExprIntRange) prefix() string {
// Null implementation to fullfill interface requirement
var s string
return s
}

func ParseNodeList(raw string) (NodeList, error) {
isLetter := func(r byte) bool { return ('a' <= r && r <= 'z') || ('A' <= r && r <= 'Z') }
isDigit := func(r byte) bool { return '0' <= r && r <= '9' }

@@ -116,6 +183,8 @@ func ParseNodeList(raw string) (NodeList, error) {
for _, rawterm := range rawterms {
exprs := []interface {
consume(input string) (next string, ok bool)
limits() []map[string]int64
prefix() string
}{}
for i := 0; i < len(rawterm); i++ {
c := rawterm[i]
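
The PrintList and limits helpers above expand each node-list term into its prefix plus every zero-padded index of its range, and NumberOfNodes simply returns the length of that expanded list. A self-contained sketch of the expansion (illustrative only, not the package code):

package main

import "fmt"

// expand mirrors the inner loop of PrintList above: prefix + zero-padded index
// for every value from start to end (inclusive).
func expand(prefix string, start, end int64) []string {
	out := []string{}
	for i := start; i < end+1; i++ {
		out = append(out, fmt.Sprintf("%s%02d", prefix, i))
	}
	return out
}

func main() {
	hosts := expand("node", 1, 4)
	fmt.Println(hosts)      // [node01 node02 node03 node04]
	fmt.Println(len(hosts)) // 4 -- what the NumberOfNodes resolver reports
}
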

@@ -30,8 +30,8 @@
let rooflineMaxY
let colWidth
let numBins = 50
const ccconfig = getContext('cc-config'),
metricConfig = getContext('metrics')
const ccconfig = getContext('cc-config')
const metricConfig = getContext('metrics')

let metricsInHistograms = ccconfig.analysis_view_histogramMetrics,
metricsInScatterplots = ccconfig.analysis_view_scatterPlotMetrics

@@ -161,24 +161,29 @@
<Histogram
width={colWidth - 25} height={300 * 0.5}
data={$statsQuery.data.topUsers.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
label={(x) => x < $statsQuery.data.topUsers.length ? $statsQuery.data.topUsers[Math.floor(x)].name : '0'} />
label={(x) => x < $statsQuery.data.topUsers.length ? $statsQuery.data.topUsers[Math.floor(x)].name : 'No Users'}
ylabel="Node Hours [h]"/>
{/key}
</div>
</div>
<div class="col-3">
{#key $statsQuery.data.stats[0].histDuration}
<h4>Walltime Distribution</h4>
<h4>Duration Distribution</h4>
<Histogram
width={colWidth - 25} height={300}
data={$statsQuery.data.stats[0].histDuration} />
width={colWidth - 25}
data={$statsQuery.data.stats[0].histDuration}
xlabel="Current Runtimes [h]"
ylabel="Number of Jobs"/>
{/key}
</div>
<div class="col-3">
{#key $statsQuery.data.stats[0].histNumNodes}
<h4>Number of Nodes Distribution</h4>
<Histogram
width={colWidth - 25} height={300}
data={$statsQuery.data.stats[0].histNumNodes} />
width={colWidth - 25}
data={$statsQuery.data.stats[0].histNumNodes}
xlabel="Allocated Nodes [#]"
ylabel="Number of Jobs" />
{/key}
</div>
<div class="col-3">

@@ -189,7 +194,7 @@
{:else if $rooflineQuery.data && cluster}
{#key $rooflineQuery.data}
<Roofline
width={colWidth - 25} height={300}
width={colWidth - 25}
tiles={$rooflineQuery.data.rooflineHeatmap}
cluster={cluster.subClusters.length == 1 ? cluster.subClusters[0] : null}
maxY={rooflineMaxY} />

@@ -224,12 +229,16 @@
$footprintsQuery.data.footprints.nodehours,
$footprintsQuery.data.footprints.metrics.find(f => f.metric == metric).data, numBins) }))}
itemsPerRow={ccconfig.plot_view_plotsPerRow}>
<h4>{item.metric} [{metricConfig(cluster.name, item.metric)?.unit}]</h4>
<h4>Average Distribution of '{item.metric}'</h4>

<Histogram
width={width} height={250}
min={item.min} max={item.max}
data={item.bins} label={item.label} />
data={item.bins}
label={item.label}
xlabel={`${item.metric} Average [${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}]`}
ylabel="Node Hours [h]" />
</PlotTable>
</Col>
</Row>

@@ -254,12 +263,18 @@

<ScatterPlot
width={width} height={250} color={"rgba(0, 102, 204, 0.33)"}
xLabel={`${item.m1} [${metricConfig(cluster.name, item.m1)?.unit}]`}
yLabel={`${item.m2} [${metricConfig(cluster.name, item.m2)?.unit}]`}
xLabel={`${item.m1} [${(metricConfig(cluster.name, item.m1)?.unit?.prefix ? metricConfig(cluster.name, item.m1)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.m1)?.unit?.base ? metricConfig(cluster.name, item.m1)?.unit?.base : '')}]`}
yLabel={`${item.m2} [${(metricConfig(cluster.name, item.m2)?.unit?.prefix ? metricConfig(cluster.name, item.m2)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.m2)?.unit?.base ? metricConfig(cluster.name, item.m2)?.unit?.base : '')}]`}
X={item.f1} Y={item.f2} S={$footprintsQuery.data.footprints.nodehours} />
</PlotTable>
</Col>
</Row>
{/if}

<style>
h4 {
text-align: center;
}
</style>

@@ -81,7 +81,7 @@
missingMetrics = metricNames.filter(metric => !metrics.some(jm => jm.name == metric))
missingHosts = job.resources.map(({ hostname }) => ({
hostname: hostname,
metrics: metricNames.filter(metric => !metrics.some(jm => jm.metric.scope == 'node' && jm.metric.series.some(series => series.hostname == hostname)))
metrics: metricNames.filter(metric => !metrics.some(jm => jm.scope == 'node' && jm.metric.series.some(series => series.hostname == hostname)))
})).filter(({ metrics }) => metrics.length > 0)
somethingMissing = missingMetrics.length > 0 || missingHosts.length > 0
}

@@ -114,8 +114,8 @@
cluster={clusters
.find(c => c.name == $initq.data.job.cluster).subClusters
.find(sc => sc.name == $initq.data.job.subCluster)}
flopsAny={$jobMetrics.data.jobMetrics.find(m => m.name == 'flops_any' && m.metric.scope == 'node').metric}
memBw={$jobMetrics.data.jobMetrics.find(m => m.name == 'mem_bw' && m.metric.scope == 'node').metric} />
flopsAny={$jobMetrics.data.jobMetrics.find(m => m.name == 'flops_any' && m.scope == 'node').metric}
memBw={$jobMetrics.data.jobMetrics.find(m => m.name == 'mem_bw' && m.scope == 'node').metric} />
</Col>
{:else}
<Col></Col>

@@ -163,8 +163,9 @@
bind:this={plots[item.metric]}
on:more-loaded={({ detail }) => statsTable.moreLoaded(detail)}
job={$initq.data.job}
metric={item.metric}
scopes={item.data.map(x => x.metric)}
metricName={item.metric}
rawData={item.data.map(x => x.metric)}
scopes={item.data.map(x => x.scope)}
width={width}/>
{:else}
<Card body color="warning">No data for <code>{item.metric}</code></Card>

@@ -5,21 +5,36 @@
import { fetchMetrics, minScope } from './utils'

export let job
export let metric
export let metricName
export let scopes
export let width
export let rawData

const dispatch = createEventDispatcher()
const cluster = getContext('clusters').find(cluster => cluster.name == job.cluster)
const subCluster = cluster.subClusters.find(subCluster => subCluster.name == job.subCluster)
const metricConfig = cluster.metricConfig.find(metricConfig => metricConfig.name == metric)
const metricConfig = cluster.metricConfig.find(metricConfig => metricConfig.name == metricName)

let selectedScope = minScope(scopes.map(s => s.scope)), selectedHost = null, plot, fetching = false, error = null
let selectedHost = null, plot, fetching = false, error = null
let selectedScope = minScope(scopes)
let selectedScopeIndex = scopes.findIndex(s => s == selectedScope)

$: avaliableScopes = scopes.map(metric => metric.scope)
$: data = scopes.find(metric => metric.scope == selectedScope)
// console.log('- Inputs -')
// console.log(metricName)
// console.log(scopes)
// console.log(rawData)
// console.log('- Prep Scopes -')
// console.log(selectedScope)
// console.log(selectedScopeIndex)

$: avaliableScopes = scopes
$: data = rawData[selectedScopeIndex]
$: series = data?.series.filter(series => selectedHost == null || series.hostname == selectedHost)

// console.log('- Prep Data -')
// console.log(rawData[selectedScopeIndex])
// console.log(rawData[selectedScopeIndex].series.filter(series => selectedHost == null || series.hostname == selectedHost))

let from = null, to = null
export function setTimeRange(f, t) {
from = f, to = t

@@ -29,7 +44,7 @@

export async function loadMore() {
fetching = true
let response = await fetchMetrics(job, [metric], ["core"])
let response = await fetchMetrics(job, [metricName], ["core"])
fetching = false

if (response.error) {

@@ -38,9 +53,9 @@
}

for (let jm of response.data.jobMetrics) {
if (jm.metric.scope != "node") {
if (jm.scope != "node") {
scopes.push(jm.metric)
selectedScope = jm.metric.scope
selectedScope = jm.scope
dispatch('more-loaded', jm)
if (!avaliableScopes.includes(selectedScope))
avaliableScopes = [...avaliableScopes, selectedScope]

@@ -52,7 +67,8 @@
</script>
<InputGroup>
<InputGroupText style="min-width: 150px;">
{metric} ({metricConfig?.unit})
{metricName} ({(metricConfig?.unit?.prefix ? metricConfig.unit.prefix : '') +
(metricConfig?.unit?.base ? metricConfig.unit.base : '')})
</InputGroupText>
<select class="form-select" bind:value={selectedScope}>
{#each avaliableScopes as scope}

@@ -82,7 +98,7 @@
width={width} height={300}
cluster={cluster} subCluster={subCluster}
timestep={data.timestep}
scope={selectedScope} metric={metric}
scope={selectedScope} metric={metricName}
series={series} />
{/if}
{/key}

@@ -20,16 +20,19 @@
from.setMinutes(from.getMinutes() - 30)
}

const ccconfig = getContext('cc-config'), clusters = getContext('clusters')
const ccconfig = getContext('cc-config')
const clusters = getContext('clusters')

const nodesQuery = operationStore(`query($cluster: String!, $nodes: [String!], $from: Time!, $to: Time!) {
nodeMetrics(cluster: $cluster, nodes: $nodes, from: $from, to: $to) {
host, subCluster
host
subCluster
metrics {
name,
name
scope
metric {
timestep
scope
unit { base, prefix }
series {
statistics { min, avg, max }
data

@@ -46,6 +49,17 @@

$: $nodesQuery.variables = { cluster, nodes: [hostname], from: from.toISOString(), to: to.toISOString() }

let metricUnits = {}
$: if ($nodesQuery.data) {
for (let metric of clusters.find(c => c.name == cluster).metricConfig) {
if (metric.unit.prefix || metric.unit.base) {
metricUnits[metric.name] = '(' + (metric.unit.prefix ? metric.unit.prefix : '') + (metric.unit.base ? metric.unit.base : '') + ')'
} else { // If no unit defined: Omit Unit Display
metricUnits[metric.name] = ''
}
}
}

query(nodesQuery)

$: console.log($nodesQuery?.data?.nodeMetrics[0].metrics)

@@ -83,7 +97,7 @@
let:width
itemsPerRow={ccconfig.plot_view_plotsPerRow}
items={$nodesQuery.data.nodeMetrics[0].metrics.sort((a, b) => a.name.localeCompare(b.name))}>
<h4 style="text-align: center;">{item.name}</h4>
<h4 style="text-align: center;">{item.name} {metricUnits[item.name]}</h4>
<MetricPlot
width={width} height={300} metric={item.name} timestep={item.metric.timestep}
cluster={clusters.find(c => c.name == cluster)} subCluster={$nodesQuery.data.nodeMetrics[0].subCluster}

@@ -11,7 +11,7 @@
const allMetrics = [...new Set(jobMetrics.map(m => m.name))].sort(),
scopesForMetric = (metric) => jobMetrics
.filter(jm => jm.name == metric)
.map(jm => jm.metric.scope)
.map(jm => jm.scope)

let hosts = job.resources.map(r => r.hostname).sort(),
selectedScopes = {},

@@ -40,7 +40,7 @@
s.active = true
}

let series = jobMetrics.find(jm => jm.name == metric && jm.metric.scope == 'node')?.metric.series
let series = jobMetrics.find(jm => jm.name == metric && jm.scope == 'node')?.metric.series
sorting = {...sorting}
hosts = hosts.sort((h1, h2) => {
let s1 = series.find(s => s.hostname == h1)?.statistics

@@ -5,7 +5,7 @@
export let jobMetrics

$: series = jobMetrics
.find(jm => jm.name == metric && jm.metric.scope == scope)
.find(jm => jm.name == metric && jm.scope == scope)
?.metric.series.filter(s => s.hostname == host && s.statistics != null)
</script>

@@ -15,13 +15,14 @@
let from = new Date(Date.now() - 5 * 60 * 1000), to = new Date(Date.now())
const mainQuery = operationStore(`query($cluster: String!, $filter: [JobFilter!]!, $metrics: [String!], $from: Time!, $to: Time!) {
nodeMetrics(cluster: $cluster, metrics: $metrics, from: $from, to: $to) {
host,
subCluster,
host
subCluster
metrics {
name,
metric {
name
scope
timestep,
metric {
timestep
unit { base, prefix }
series { data }
}
}

@@ -47,13 +48,15 @@
? sum + (node.metrics.find(m => m.name == metric)?.metric.series.reduce((sum, series) => sum + series.data[series.data.length - 1], 0) || 0)
: sum, 0)

let allocatedNodes = {}, flopRate = {}, memBwRate = {}
let allocatedNodes = {}, flopRate = {}, flopRateUnit = {}, memBwRate = {}, memBwRateUnit = {}
$: if ($initq.data && $mainQuery.data) {
let subClusters = $initq.data.clusters.find(c => c.name == cluster).subClusters
for (let subCluster of subClusters) {
allocatedNodes[subCluster.name] = $mainQuery.data.allocatedNodes.find(({ name }) => name == subCluster.name)?.count || 0
flopRate[subCluster.name] = Math.floor(sumUp($mainQuery.data.nodeMetrics, subCluster.name, 'flops_any') * 100) / 100
flopRateUnit[subCluster.name] = subCluster.flopRateSimd.unit.prefix + subCluster.flopRateSimd.unit.base
memBwRate[subCluster.name] = Math.floor(sumUp($mainQuery.data.nodeMetrics, subCluster.name, 'mem_bw') * 100) / 100
memBwRateUnit[subCluster.name] = subCluster.memoryBandwidth.unit.prefix + subCluster.memoryBandwidth.unit.base
}
}

@@ -116,13 +119,13 @@
</tr>
<tr>
<th scope="col">Flop Rate (Any) <Icon name="info-circle" class="p-1" style="cursor: help;" title="Flops[Any] = (Flops[Double] x 2) + Flops[Single]"/></th>
<td style="min-width: 100px;"><div class="col"><Progress value={flopRate[subCluster.name]} max={subCluster.flopRateSimd * subCluster.numberOfNodes}/></div></td>
<td>({formatNumber(flopRate[subCluster.name])}Flops/s / {formatNumber((subCluster.flopRateSimd * subCluster.numberOfNodes))}Flops/s [Max])</td>
<td style="min-width: 100px;"><div class="col"><Progress value={flopRate[subCluster.name]} max={subCluster.flopRateSimd.value * subCluster.numberOfNodes}/></div></td>
<td>({flopRate[subCluster.name]} {flopRateUnit[subCluster.name]} / {(subCluster.flopRateSimd.value * subCluster.numberOfNodes)} {flopRateUnit[subCluster.name]} [Max])</td>
</tr>
<tr>
<th scope="col">MemBw Rate</th>
<td style="min-width: 100px;"><div class="col"><Progress value={memBwRate[subCluster.name]} max={subCluster.memoryBandwidth * subCluster.numberOfNodes}/></div></td>
<td>({formatNumber(memBwRate[subCluster.name])}Byte/s / {formatNumber((subCluster.memoryBandwidth * subCluster.numberOfNodes))}Byte/s [Max])</td>
<td style="min-width: 100px;"><div class="col"><Progress value={memBwRate[subCluster.name]} max={subCluster.memoryBandwidth.value * subCluster.numberOfNodes}/></div></td>
<td>({memBwRate[subCluster.name]} {memBwRateUnit[subCluster.name]} / {(subCluster.memoryBandwidth.value * subCluster.numberOfNodes)} {memBwRateUnit[subCluster.name]} [Max])</td>
</tr>
</Table>
</CardBody>

@@ -150,7 +153,7 @@
<h4 class="mb-3 text-center">Top Users</h4>
{#key $mainQuery.data}
<Histogram
width={colWidth1 - 25} height={300}
width={colWidth1 - 25}
data={$mainQuery.data.topUsers.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
label={(x) => x < $mainQuery.data.topUsers.length ? $mainQuery.data.topUsers[Math.floor(x)].name : '0'}
xlabel="User Name" ylabel="Number of Jobs" />

@@ -172,7 +175,7 @@
<h4 class="mb-3 text-center">Top Projects</h4>
{#key $mainQuery.data}
<Histogram
width={colWidth1 - 25} height={300}
width={colWidth1 - 25}
data={$mainQuery.data.topProjects.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
label={(x) => x < $mainQuery.data.topProjects.length ? $mainQuery.data.topProjects[Math.floor(x)].name : '0'}
xlabel="Project Code" ylabel="Number of Jobs" />

@@ -193,9 +196,10 @@
<h4 class="mb-3 text-center">Duration Distribution</h4>
{#key $mainQuery.data.stats}
<Histogram
width={colWidth2 - 25} height={300}
width={colWidth2 - 25}
data={$mainQuery.data.stats[0].histDuration}
xlabel="Current Runtime in Hours [h]" ylabel="Number of Jobs" />
xlabel="Current Runtimes [h]"
ylabel="Number of Jobs" />
{/key}
</div>
</Col>

@@ -203,9 +207,10 @@
<h4 class="mb-3 text-center">Number of Nodes Distribution</h4>
{#key $mainQuery.data.stats}
<Histogram
width={colWidth2 - 25} height={300}
width={colWidth2 - 25}
data={$mainQuery.data.stats[0].histNumNodes}
xlabel="Allocated Nodes" ylabel="Number of Jobs" />
xlabel="Allocated Nodes [#]"
ylabel="Number of Jobs" />
{/key}
</Col>
</Row>

@@ -28,13 +28,14 @@

const nodesQuery = operationStore(`query($cluster: String!, $metrics: [String!], $from: Time!, $to: Time!) {
nodeMetrics(cluster: $cluster, metrics: $metrics, from: $from, to: $to) {
host,
host
subCluster
metrics {
name,
metric {
name
scope
timestep,
metric {
timestep
unit { base, prefix }
series {
statistics { min, avg, max }
data

@@ -49,6 +50,18 @@
to: to.toISOString()
})

let metricUnits = {}
$: if ($nodesQuery.data) {
let thisCluster = clusters.find(c => c.name == cluster)
for (let metric of thisCluster.metricConfig) {
if (metric.unit.prefix || metric.unit.base) {
metricUnits[metric.name] = '(' + (metric.unit.prefix ? metric.unit.prefix : '') + (metric.unit.base ? metric.unit.base : '') + ')'
} else { // If no unit defined: Omit Unit Display
metricUnits[metric.name] = ''
}
}
}

$: $nodesQuery.variables = { cluster, metrics: [selectedMetric], from: from.toISOString(), to: to.toISOString() }

query(nodesQuery)

@@ -71,7 +84,7 @@
<InputGroupText>Metric</InputGroupText>
<select class="form-select" bind:value={selectedMetric}>
{#each clusters.find(c => c.name == cluster).metricConfig as metric}
<option value={metric.name}>{metric.name} ({metric.unit})</option>
<option value={metric.name}>{metric.name} {metricUnits[metric.name]}</option>
{/each}
</select>
</InputGroup>

@@ -98,8 +111,8 @@
let:width
itemsPerRow={ccconfig.plot_view_plotsPerRow}
items={$nodesQuery.data.nodeMetrics
.filter(h => h.host.includes(hostnameFilter) && h.metrics.some(m => m.name == selectedMetric && m.metric.scope == 'node'))
.map(h => ({ host: h.host, subCluster: h.subCluster, data: h.metrics.find(m => m.name == selectedMetric && m.metric.scope == 'node') }))
.filter(h => h.host.includes(hostnameFilter) && h.metrics.some(m => m.name == selectedMetric && m.scope == 'node'))
.map(h => ({ host: h.host, subCluster: h.subCluster, data: h.metrics.find(m => m.name == selectedMetric && m.scope == 'node') }))
.sort((a, b) => a.host.localeCompare(b.host))}>

<h4 style="width: 100%; text-align: center;"><a href="/monitoring/node/{cluster}/{item.host}">{item.host} ({item.subCluster})</a></h4>

@@ -136,19 +136,23 @@
</Table>
</Col>
<div class="col-4" style="text-align: center;" bind:clientWidth={w1}>
<b>Walltime</b>
<b>Duration Distribution</b>
{#key $stats.data.jobsStatistics[0].histDuration}
<Histogram
data={$stats.data.jobsStatistics[0].histDuration}
width={w1 - 25} height={histogramHeight} />
width={w1 - 25} height={histogramHeight}
xlabel="Current Runtimes [h]"
ylabel="Number of Jobs"/>
{/key}
</div>
<div class="col-4" style="text-align: center;" bind:clientWidth={w2}>
<b>Number of Nodes</b>
<b>Number of Nodes Distribution</b>
{#key $stats.data.jobsStatistics[0].histNumNodes}
<Histogram
data={$stats.data.jobsStatistics[0].histNumNodes}
width={w2 - 25} height={histogramHeight} />
width={w2 - 25} height={histogramHeight}
xlabel="Allocated Nodes [#]"
ylabel="Number of Jobs" />
{/key}
</div>
{/if}

@@ -101,9 +101,11 @@
{#if $initialized}
({clusters
.map(cluster => cluster.metricConfig.find(m => m.name == metric))
.filter(m => m != null).map(m => m.unit)
.reduce((arr, unit) => arr.includes(unit) ? arr : [...arr, unit], [])
.join(', ')})
.filter(m => m != null)
.map(m => (m.unit?.prefix?m.unit?.prefix:'') + (m.unit?.base?m.unit?.base:'')) // Build unitStr
.reduce((arr, unitStr) => arr.includes(unitStr) ? arr : [...arr, unitStr], []) // w/o this, output would be [unitStr, unitStr]
.join(', ')
})
{/if}
</th>
{/each}

@@ -24,12 +24,14 @@
let scopes = [job.numNodes == 1 ? 'core' : 'node']

const cluster = getContext('clusters').find(c => c.name == job.cluster)

// Get all MetricConfs which include subCluster-specific settings for this job
const metricConfig = getContext('metrics')
const metricsQuery = operationStore(`query($id: ID!, $metrics: [String!]!, $scopes: [MetricScope!]!) {
jobMetrics(id: $id, metrics: $metrics, scopes: $scopes) {
name
scope
metric {
unit, scope, timestep
unit { prefix, base }, timestep
statisticsSeries { min, mean, max }
series {
hostname, id, data

@@ -44,13 +46,64 @@
})

const selectScope = (jobMetrics) => jobMetrics.reduce(
(a, b) => maxScope([a.metric.scope, b.metric.scope]) == a.metric.scope
(a, b) => maxScope([a.scope, b.scope]) == a.scope
? (job.numNodes > 1 ? a : b)
: (job.numNodes > 1 ? b : a), jobMetrics[0])

const sortAndSelectScope = (jobMetrics) => metrics
.map(name => jobMetrics.filter(jobMetric => jobMetric.name == name))
.map(jobMetrics => jobMetrics.length > 0 ? selectScope(jobMetrics) : null)
.map(function(name) {
// Get MetricConf for this selected/requested metric
let thisConfig = metricConfig(cluster, name)
let thisSCIndex = thisConfig.subClusters.findIndex(sc => sc.name == job.subCluster)
// Check if Subcluster has MetricConf: If not found (index == -1), no further remove flag check required
if (thisSCIndex >= 0) {
// SubCluster Config present: Check if remove flag is set
if (thisConfig.subClusters[thisSCIndex].remove == true) {
// Return null data and informational flag
// console.log('Case 1.1 -> Returned')
// console.log({removed: true, data: null})
return {removed: true, data: null}
} else {
// load and return metric, if data available
let thisMetric = jobMetrics.filter(jobMetric => jobMetric.name == name) // Returns Array
if (thisMetric.length > 0) {
// console.log('Case 1.2.1 -> Returned')
// console.log({removed: false, data: thisMetric})
return {removed: false, data: thisMetric}
} else {
// console.log('Case 1.2.2 -> Returned:')
// console.log({removed: false, data: null})
return {removed: false, data: null}
}
}
} else {
// No specific subCluster config: 'remove' flag not set, deemed false -> load and return metric, if data available
let thisMetric = jobMetrics.filter(jobMetric => jobMetric.name == name) // Returns Array
if (thisMetric.length > 0) {
// console.log('Case 2.1 -> Returned')
// console.log({removed: false, data: thisMetric})
return {removed: false, data: thisMetric}
} else {
// console.log('Case 2.2 -> Returned')
// console.log({removed: false, data: null})
return {removed: false, data: null}
}
}
})
.map(function(jobMetrics) {
if (jobMetrics.data != null && jobMetrics.data.length > 0) {
// console.log('Before')
// console.log(jobMetrics.data)
// console.log('After')
// console.log(selectScope(jobMetrics.data))
let res = {removed: jobMetrics.removed, data: selectScope(jobMetrics.data)}
// console.log('Packed')
// console.log(res)
return res
} else {
return jobMetrics
}
})

$: metricsQuery.variables = { id: job.id, metrics, scopes }

@@ -81,17 +134,20 @@
{:else}
{#each sortAndSelectScope($metricsQuery.data.jobMetrics) as metric, i (metric || i)}
<td>
{#if metric != null}
<!-- Subluster Metricconfig remove keyword for jobtables (joblist main, user joblist, project joblist) to be used here as toplevel case-->
{#if metric.removed == false && metric.data != null}
<MetricPlot
width={plotWidth}
height={plotHeight}
timestep={metric.metric.timestep}
scope={metric.metric.scope}
series={metric.metric.series}
statisticsSeries={metric.metric.statisticsSeries}
metric={metric.name}
timestep={metric.data.metric.timestep}
scope={metric.data.scope}
series={metric.data.metric.series}
statisticsSeries={metric.data.metric.statisticsSeries}
metric={metric.data.name}
cluster={cluster}
subCluster={job.subCluster} />
{:else if metric.removed == true && metric.data == null}
<Card body color="info">Metric disabled for subcluster '{ job.subCluster }'</Card>
{:else}
<Card body color="warning">Missing Data</Card>
{/if}

@@ -18,10 +18,10 @@
import { onMount } from 'svelte'

export let data
export let width
export let height
export let xlabel
export let ylabel
export let width = 500
export let height = 300
export let xlabel = ''
export let ylabel = ''
export let min = null
export let max = null
export let label = formatNumber

@@ -18,7 +18,7 @@
let ctx, canvasElement

const labels = metrics.filter(name => {
if (!jobMetrics.find(m => m.name == name && m.metric.scope == "node")) {
if (!jobMetrics.find(m => m.name == name && m.scope == "node")) {
console.warn(`PolarPlot: No metric data for '${name}'`)
return false
}

@@ -27,7 +27,7 @@

const getValuesForStat = (getStat) => labels.map(name => {
const peak = metricConfig(cluster, name).peak
const metric = jobMetrics.find(m => m.name == name && m.metric.scope == "node")
const metric = jobMetrics.find(m => m.name == name && m.scope == "node")
const value = getStat(metric.metric) / peak
return value <= 1. ? value : 1.
})

@@ -71,7 +71,7 @@
if (width <= 0)
return

const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd || defaultMaxY]
const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd?.value || defaultMaxY]
const w = width - paddingLeft - paddingRight
const h = height - paddingTop - paddingBottom

@@ -185,13 +185,13 @@
ctx.lineWidth = 2
ctx.beginPath()
if (cluster != null) {
const ycut = 0.01 * cluster.memoryBandwidth
const scalarKnee = (cluster.flopRateScalar - ycut) / cluster.memoryBandwidth
const simdKnee = (cluster.flopRateSimd - ycut) / cluster.memoryBandwidth
const ycut = 0.01 * cluster.memoryBandwidth.value
const scalarKnee = (cluster.flopRateScalar.value - ycut) / cluster.memoryBandwidth.value
const simdKnee = (cluster.flopRateSimd.value - ycut) / cluster.memoryBandwidth.value
const scalarKneeX = getCanvasX(scalarKnee),
simdKneeX = getCanvasX(simdKnee),
flopRateScalarY = getCanvasY(cluster.flopRateScalar),
flopRateSimdY = getCanvasY(cluster.flopRateSimd)
flopRateScalarY = getCanvasY(cluster.flopRateScalar.value),
flopRateSimdY = getCanvasY(cluster.flopRateSimd.value)

if (scalarKneeX < width - paddingRight) {
ctx.moveTo(scalarKneeX, flopRateScalarY)

@@ -270,8 +270,8 @@
export function transformPerNodeData(nodes) {
const x = [], y = [], c = []
for (let node of nodes) {
let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.metric.scope == 'node')?.metric
let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.metric.scope == 'node')?.metric
let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.scope == 'node')?.metric
let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.scope == 'node')?.metric
if (!flopsAny || !memBw)
continue

@@ -301,8 +301,8 @@
export let memBw = null
export let cluster = null
export let maxY = null
export let width
export let height
export let width = 500
export let height = 300
export let tiles = null
export let colorDots = true
export let showTime = true

@@ -41,7 +41,7 @@ export function init(extraInitQuery = '') {
normal, caution, alert,
timestep, scope,
aggregation,
subClusters { name, peak, normal, caution, alert }
subClusters { name, peak, normal, caution, alert, remove }
}
partitions
subClusters {

@@ -49,9 +49,9 @@ export function init(extraInitQuery = '') {
socketsPerNode
coresPerSocket
threadsPerCore
flopRateScalar
flopRateSimd
memoryBandwidth
flopRateScalar { unit { base, prefix }, value }
flopRateSimd { unit { base, prefix }, value }
memoryBandwidth { unit { base, prefix }, value }
numberOfNodes
topology {
node, socket, core
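
The init query above now fetches flopRateScalar, flopRateSimd and memoryBandwidth as structured values. A minimal Go sketch of the assumed shape behind those fields (type and field names are assumptions, JSON keys follow the query): the Svelte views read .value for arithmetic and concatenate unit.prefix with unit.base for display labels.

package sketch

// Assumed shape of the structured sub-cluster values requested by the
// GraphQL query above (JSON keys follow the query's field names).
type Unit struct {
	Base   string `json:"base"`
	Prefix string `json:"prefix,omitempty"`
}

type MetricValue struct {
	Unit  Unit    `json:"unit"`
	Value float64 `json:"value"`
}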