feat: add select to status view pie charts

- 'Jobs' as generic default value for top lists
- Prepare histograms for cores and accs in schema
Christoph Kluge 2023-08-29 14:01:01 +02:00
parent 69519ec040
commit f933cad87f
6 changed files with 267 additions and 58 deletions
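As a usage sketch (not part of this commit): a client query against the extended schema could request the new TOTALJOBS sort order together with the prepared histogram fields. The variable shapes mirror the status-view queries in this diff; the histograms are only declared in the schema here, so the backend resolvers may not populate them yet.

query ($filter: [JobFilter!]!, $paging: PageRequest!) {
  topUser: jobsStatistics(filter: $filter, page: $paging, sortBy: TOTALJOBS, groupBy: USER) {
    id
    totalJobs
    totalCores
    histNumCores { count value }  # declared in this commit; resolver support may follow later
    histNumAccs { count value }
  }
}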

View File

@@ -167,7 +167,7 @@ type TimeWeights {
}
enum Aggregate { USER, PROJECT, CLUSTER }
-enum SortByAggregate { WALLTIME, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
+enum SortByAggregate { WALLTIME, TOTALJOBS, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
type NodeMetrics {
host: String!
@@ -301,6 +301,8 @@ type JobsStatistics {
totalAccHours: Int! # Sum of the gpu hours of all matched jobs
histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
histNumAccs: [HistoPoint!]! # value: number of accs, count: number of jobs with that number of accs
}
input PageRequest {

View File

@@ -141,6 +141,8 @@ type ComplexityRoot struct {
JobsStatistics struct {
HistDuration func(childComplexity int) int
HistNumAccs func(childComplexity int) int
HistNumCores func(childComplexity int) int
HistNumNodes func(childComplexity int) int
ID func(childComplexity int) int
Name func(childComplexity int) int
@@ -728,6 +730,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.JobsStatistics.HistDuration(childComplexity), true
case "JobsStatistics.histNumAccs":
if e.complexity.JobsStatistics.HistNumAccs == nil {
break
}
return e.complexity.JobsStatistics.HistNumAccs(childComplexity), true
case "JobsStatistics.histNumCores":
if e.complexity.JobsStatistics.HistNumCores == nil {
break
}
return e.complexity.JobsStatistics.HistNumCores(childComplexity), true
case "JobsStatistics.histNumNodes": case "JobsStatistics.histNumNodes":
if e.complexity.JobsStatistics.HistNumNodes == nil { if e.complexity.JobsStatistics.HistNumNodes == nil {
break break
@@ -1751,7 +1767,7 @@ type TimeWeights {
}
enum Aggregate { USER, PROJECT, CLUSTER }
-enum SortByAggregate { WALLTIME, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
+enum SortByAggregate { WALLTIME, TOTALJOBS, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
type NodeMetrics {
host: String!
@@ -1885,6 +1901,8 @@ type JobsStatistics {
totalAccHours: Int! # Sum of the gpu hours of all matched jobs
histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
histNumAccs: [HistoPoint!]! # value: number of accs, count: number of jobs with that number of accs
}
input PageRequest {
@@ -5522,6 +5540,106 @@ func (ec *executionContext) fieldContext_JobsStatistics_histNumNodes(ctx context
return fc, nil
}
func (ec *executionContext) _JobsStatistics_histNumCores(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_JobsStatistics_histNumCores(ctx, field)
if err != nil {
return graphql.Null
}
ctx = graphql.WithFieldContext(ctx, fc)
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.HistNumCores, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.HistoPoint)
fc.Result = res
return ec.marshalNHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐHistoPointᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) fieldContext_JobsStatistics_histNumCores(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: "JobsStatistics",
Field: field,
IsMethod: false,
IsResolver: false,
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
switch field.Name {
case "count":
return ec.fieldContext_HistoPoint_count(ctx, field)
case "value":
return ec.fieldContext_HistoPoint_value(ctx, field)
}
return nil, fmt.Errorf("no field named %q was found under type HistoPoint", field.Name)
},
}
return fc, nil
}
func (ec *executionContext) _JobsStatistics_histNumAccs(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_JobsStatistics_histNumAccs(ctx, field)
if err != nil {
return graphql.Null
}
ctx = graphql.WithFieldContext(ctx, fc)
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.HistNumAccs, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.HistoPoint)
fc.Result = res
return ec.marshalNHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐHistoPointᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) fieldContext_JobsStatistics_histNumAccs(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: "JobsStatistics",
Field: field,
IsMethod: false,
IsResolver: false,
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
switch field.Name {
case "count":
return ec.fieldContext_HistoPoint_count(ctx, field)
case "value":
return ec.fieldContext_HistoPoint_value(ctx, field)
}
return nil, fmt.Errorf("no field named %q was found under type HistoPoint", field.Name)
},
}
return fc, nil
}
func (ec *executionContext) _MetricConfig_name(ctx context.Context, field graphql.CollectedField, obj *schema.MetricConfig) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_MetricConfig_name(ctx, field)
if err != nil {
@@ -7309,6 +7427,10 @@ func (ec *executionContext) fieldContext_Query_jobsStatistics(ctx context.Contex
return ec.fieldContext_JobsStatistics_histDuration(ctx, field)
case "histNumNodes":
return ec.fieldContext_JobsStatistics_histNumNodes(ctx, field)
case "histNumCores":
return ec.fieldContext_JobsStatistics_histNumCores(ctx, field)
case "histNumAccs":
return ec.fieldContext_JobsStatistics_histNumAccs(ctx, field)
}
return nil, fmt.Errorf("no field named %q was found under type JobsStatistics", field.Name)
},
@@ -12778,6 +12900,16 @@ func (ec *executionContext) _JobsStatistics(ctx context.Context, sel ast.Selecti
if out.Values[i] == graphql.Null {
out.Invalids++
}
case "histNumCores":
out.Values[i] = ec._JobsStatistics_histNumCores(ctx, field, obj)
if out.Values[i] == graphql.Null {
out.Invalids++
}
case "histNumAccs":
out.Values[i] = ec._JobsStatistics_histNumAccs(ctx, field, obj)
if out.Values[i] == graphql.Null {
out.Invalids++
}
default:
panic("unknown field " + strconv.Quote(field.Name))
}

View File

@@ -99,6 +99,8 @@ type JobsStatistics struct {
TotalAccHours int `json:"totalAccHours"`
HistDuration []*HistoPoint `json:"histDuration"`
HistNumNodes []*HistoPoint `json:"histNumNodes"`
HistNumCores []*HistoPoint `json:"histNumCores"`
HistNumAccs []*HistoPoint `json:"histNumAccs"`
}
type MetricFootprints struct {
@@ -195,6 +197,7 @@ type SortByAggregate string
const (
SortByAggregateWalltime SortByAggregate = "WALLTIME"
SortByAggregateTotaljobs SortByAggregate = "TOTALJOBS"
SortByAggregateTotalnodes SortByAggregate = "TOTALNODES"
SortByAggregateNodehours SortByAggregate = "NODEHOURS"
SortByAggregateTotalcores SortByAggregate = "TOTALCORES"
@@ -205,6 +208,7 @@ const (
var AllSortByAggregate = []SortByAggregate{
SortByAggregateWalltime,
SortByAggregateTotaljobs,
SortByAggregateTotalnodes,
SortByAggregateNodehours,
SortByAggregateTotalcores,
@@ -215,7 +219,7 @@ var AllSortByAggregate = []SortByAggregate{
func (e SortByAggregate) IsValid() bool {
switch e {
-case SortByAggregateWalltime, SortByAggregateTotalnodes, SortByAggregateNodehours, SortByAggregateTotalcores, SortByAggregateCorehours, SortByAggregateTotalaccs, SortByAggregateAcchours:
+case SortByAggregateWalltime, SortByAggregateTotaljobs, SortByAggregateTotalnodes, SortByAggregateNodehours, SortByAggregateTotalcores, SortByAggregateCorehours, SortByAggregateTotalaccs, SortByAggregateAcchours:
return true
}
return false

View File

@@ -24,6 +24,7 @@ var groupBy2column = map[model.Aggregate]string{
}
var sortBy2column = map[model.SortByAggregate]string{
model.SortByAggregateTotaljobs: "totalJobs",
model.SortByAggregateWalltime: "totalWalltime",
model.SortByAggregateTotalnodes: "totalNodes",
model.SortByAggregateNodehours: "totalNodeHours",
@@ -71,7 +72,7 @@ func (r *JobRepository) buildStatsQuery(
if col != "" {
// Scan columns: id, totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
-query = sq.Select(col, "COUNT(job.id)",
+query = sq.Select(col, "COUNT(job.id) as totalJobs",
fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s) as totalWalltime", castType),
fmt.Sprintf("CAST(SUM(job.num_nodes) as %s) as totalNodes", castType),
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s) as totalNodeHours", castType),
@@ -168,8 +169,15 @@ func (r *JobRepository) JobsStatsGrouped(
}
if id.Valid {
-var totalCores, totalCoreHours, totalAccs, totalAccHours int
+var totalJobs, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours int
if jobs.Valid {
totalJobs = int(jobs.Int64)
}
if nodes.Valid {
totalNodes = int(nodes.Int64)
}
if cores.Valid {
totalCores = int(cores.Int64)
}
@@ -177,6 +185,9 @@ func (r *JobRepository) JobsStatsGrouped(
totalAccs = int(accs.Int64)
}
if nodeHours.Valid {
totalNodeHours = int(nodeHours.Int64)
}
if coreHours.Valid {
totalCoreHours = int(coreHours.Int64)
}
@@ -190,8 +201,10 @@ func (r *JobRepository) JobsStatsGrouped(
&model.JobsStatistics{
ID: id.String,
Name: name,
-TotalJobs: int(jobs.Int64),
+TotalJobs: totalJobs,
TotalWalltime: int(walltime.Int64),
TotalNodes: totalNodes,
TotalNodeHours: totalNodeHours,
TotalCores: totalCores,
TotalCoreHours: totalCoreHours,
TotalAccs: totalAccs,
@@ -202,6 +215,8 @@ func (r *JobRepository) JobsStatsGrouped(
ID: id.String,
TotalJobs: int(jobs.Int64),
TotalWalltime: int(walltime.Int64),
TotalNodes: totalNodes,
TotalNodeHours: totalNodeHours,
TotalCores: totalCores,
TotalCoreHours: totalCoreHours,
TotalAccs: totalAccs,
@@ -228,16 +243,11 @@ func (r *JobRepository) jobsStats(
}
if jobs.Valid {
-var totalCoreHours, totalAccHours int
-// var totalCores, totalAccs int
-// if cores.Valid {
-// totalCores = int(cores.Int64)
-// }
-// if accs.Valid {
-// totalAccs = int(accs.Int64)
-// }
+var totalNodeHours, totalCoreHours, totalAccHours int
if nodeHours.Valid {
totalNodeHours = int(nodeHours.Int64)
}
if coreHours.Valid {
totalCoreHours = int(coreHours.Int64)
}
@@ -248,6 +258,7 @@ func (r *JobRepository) jobsStats(
&model.JobsStatistics{
TotalJobs: int(jobs.Int64),
TotalWalltime: int(walltime.Int64),
TotalNodeHours: totalNodeHours,
TotalCoreHours: totalCoreHours,
TotalAccHours: totalAccHours})
}

View File

@@ -14,6 +14,14 @@
let plotWidths = [], colWidth1 = 0, colWidth2
let from = new Date(Date.now() - 5 * 60 * 1000), to = new Date(Date.now())
const topOptions = [
{key: 'totalJobs', label: 'Jobs'},
{key: 'totalNodes', label: 'Nodes'},
{key: 'totalCores', label: 'Cores'},
{key: 'totalAccs', label: 'Accelerators'},
]
let topProjectSelection = topOptions[0] // Default: Jobs
let topUserSelection = topOptions[0] // Default: Jobs
const client = getContextClient();
$: mainQuery = queryStore({
@@ -51,29 +59,33 @@
$: topUserQuery = queryStore({
client: client,
query: gql`
-query($filter: [JobFilter!]!, $paging: PageRequest!) {
-topUser: jobsStatistics(filter: $filter, page: $paging, sortBy: TOTALCORES, groupBy: USER) {
+query($filter: [JobFilter!]!, $paging: PageRequest!, $sortBy: SortByAggregate!) {
+topUser: jobsStatistics(filter: $filter, page: $paging, sortBy: $sortBy, groupBy: USER) {
id
totalJobs
totalNodes
totalCores
totalAccs
}
}
`,
-variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging }
+variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging, sortBy: topUserSelection.key.toUpperCase() }
})
$: topProjectQuery = queryStore({
client: client,
query: gql`
-query($filter: [JobFilter!]!, $paging: PageRequest!) {
-topProjects: jobsStatistics(filter: $filter, page: $paging, sortBy: TOTALCORES, groupBy: PROJECT) {
+query($filter: [JobFilter!]!, $paging: PageRequest!, $sortBy: SortByAggregate!) {
+topProjects: jobsStatistics(filter: $filter, page: $paging, sortBy: $sortBy, groupBy: PROJECT) {
id
totalJobs
totalNodes
totalCores
totalAccs
}
}
`,
-variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging }
+variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging, sortBy: topProjectSelection.key.toUpperCase() }
})
const sumUp = (data, subcluster, metric) => data.reduce((sum, node) => node.subCluster == subcluster
@@ -188,51 +200,99 @@
<Row cols={4}>
<Col class="p-2">
<div bind:clientWidth={colWidth1}>
-<h4 class="text-center">Top Users</h4>
-{#key $topUserQuery.data}
+<h4 class="text-center">Top Users on {cluster.charAt(0).toUpperCase() + cluster.slice(1)}</h4>
+{#if $topUserQuery.fetching}
<Spinner/>
{:else if $topUserQuery.error}
<Card body color="danger">{$topUserQuery.error.message}</Card>
{:else}
<Pie
size={colWidth1}
-sliceLabel='Jobs'
-quantities={$topUserQuery.data.topUser.map((tu) => tu.totalCores)}
+sliceLabel={topUserSelection.label}
+quantities={$topUserQuery.data.topUser.map((tu) => tu[topUserSelection.key])}
entities={$topUserQuery.data.topUser.map((tu) => tu.id)}
/>
-{/key}
+{/if}
</div>
</Col>
<Col class="px-4 py-2">
-<Table>
-<tr class="mb-2"><th>Legend</th><th>User Name</th><th>Number of Cores</th></tr>
-{#each $topUserQuery.data.topUser as { id, totalCores, totalNodes }, i}
-<tr>
-<td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
-<th scope="col"><a href="/monitoring/user/{id}?cluster={cluster}&state=running">{id}</a></th>
-<td>{totalCores}</td>
-</tr>
-{/each}
-</Table>
-</Col>
-<Col class="p-2">
-<h4 class="text-center">Top Projects</h4>
-{#key $topProjectQuery.data}
-<Pie
-size={colWidth1}
-sliceLabel='Jobs'
-quantities={$topProjectQuery.data.topProjects.map((tp) => tp.totalCores)}
-entities={$topProjectQuery.data.topProjects.map((tp) => tp.id)}
-/>
+{#key $topUserQuery.data}
+{#if $topUserQuery.fetching}
+<Spinner/>
+{:else if $topUserQuery.error}
+<Card body color="danger">{$topUserQuery.error.message}</Card>
+{:else}
+<Table>
+<tr class="mb-2">
+<th>Legend</th>
+<th>User Name</th>
+<th>Number of
+<select class="p-0" bind:value={topUserSelection}>
+{#each topOptions as option}
+<option value={option}>
+{option.label}
+</option>
+{/each}
+</select>
+</th>
+</tr>
{#each $topUserQuery.data.topUser as tu, i}
<tr>
<td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
<th scope="col"><a href="/monitoring/user/{tu.id}?cluster={cluster}&state=running">{tu.id}</a></th>
<td>{tu[topUserSelection.key]}</td>
</tr>
{/each}
</Table>
{/if}
{/key}
</Col>
<Col class="p-2">
<h4 class="text-center">Top Projects on {cluster.charAt(0).toUpperCase() + cluster.slice(1)}</h4>
{#if $topProjectQuery.fetching}
<Spinner/>
{:else if $topProjectQuery.error}
<Card body color="danger">{$topProjectQuery.error.message}</Card>
{:else}
<Pie
size={colWidth1}
sliceLabel={topProjectSelection.label}
quantities={$topProjectQuery.data.topProjects.map((tp) => tp[topProjectSelection.key])}
entities={$topProjectQuery.data.topProjects.map((tp) => tp.id)}
/>
{/if}
</Col>
<Col class="px-4 py-2"> <Col class="px-4 py-2">
<Table> {#key $topProjectQuery.data}
<tr class="mb-2"><th>Legend</th><th>Project Code</th><th>Number of Cores</th></tr> {#if $topProjectQuery.fetching}
{#each $topProjectQuery.data.topProjects as { id, totalCores, totalNodes }, i} <Spinner/>
<tr> {:else if $topProjectQuery.error}
<td><Icon name="circle-fill" style="color: {colors[i]};"/></td> <Card body color="danger">{$topProjectQuery.error.message}</Card>
<th scope="col"><a href="/monitoring/jobs/?cluster={cluster}&state=running&project={id}&projectMatch=eq">{id}</a></th> {:else}
<td>{totalCores}</td> <Table>
</tr> <tr class="mb-2">
{/each} <th>Legend</th>
</Table> <th>Project Code</th>
<th>Number of
<select class="p-0" bind:value={topProjectSelection}>
{#each topOptions as option}
<option value={option}>
{option.label}
</option>
{/each}
</select>
</th>
</tr>
{#each $topProjectQuery.data.topProjects as tp, i}
<tr>
<td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
<th scope="col"><a href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq">{tp.id}</a></th>
<td>{tp[topProjectSelection.key]}</td>
</tr>
{/each}
</Table>
{/if}
{/key}
</Col>
</Row>
<hr class="my-2"/>

View File

@@ -43,14 +43,14 @@
export let entities
export let displayLegend = false
-const data = {
+$: data = {
labels: entities,
datasets: [
{
label: sliceLabel,
data: quantities,
fill: 1,
-backgroundColor: colors.slice(0, quantities.length),
+backgroundColor: colors.slice(0, quantities.length)
}
]
}