change: implement topX query in jobsStatistics

Christoph Kluge 2023-08-25 17:38:25 +02:00
parent 1d0db276e8
commit ce758610b6
6 changed files with 293 additions and 41 deletions

View File

@@ -167,7 +167,7 @@ type TimeWeights {
}
enum Aggregate { USER, PROJECT, CLUSTER }
-enum SortByAggregate { WALLTIME, NODEHOURS, COREHOURS, ACCHOURS }
+enum SortByAggregate { WALLTIME, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
type NodeMetrics {
  host: String!
@@ -293,8 +293,11 @@ type JobsStatistics {
  runningJobs: Int! # Number of running jobs
  shortJobs: Int! # Number of jobs with a duration of less than duration
  totalWalltime: Int! # Sum of the duration of all matched jobs in hours
+  totalNodes: Int! # Sum of the nodes of all matched jobs
  totalNodeHours: Int! # Sum of the node hours of all matched jobs
+  totalCores: Int! # Sum of the cores of all matched jobs
  totalCoreHours: Int! # Sum of the core hours of all matched jobs
+  totalAccs: Int! # Sum of the accs of all matched jobs
  totalAccHours: Int! # Sum of the gpu hours of all matched jobs
  histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
  histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
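
Taken together, the additions above are what make a "top X" style request possible directly through jobsStatistics: group by user, project, or cluster, sort by one of the new aggregates, and page the result. Below is a minimal sketch of such a query, written as a Go string constant; it mirrors the frontend queries added further down in this commit, and the variable names ($jobFilters, $paging) are illustrative only.

```go
package main

import "fmt"

// Hypothetical top-5-users-by-core-hours query against the extended
// JobsStatistics type. COREHOURS is one of the SortByAggregate values
// defined in the schema above; $paging is expected to carry a PageRequest
// such as { itemsPerPage: 5, page: 1 }.
const topUsersByCoreHours = `
query ($jobFilters: [JobFilter!]!, $paging: PageRequest!) {
  jobsStatistics(filter: $jobFilters, page: $paging, sortBy: COREHOURS, groupBy: USER) {
    id
    totalJobs
    totalNodes
    totalCores
    totalAccs
    totalCoreHours
  }
}`

func main() {
	fmt.Println(topUsersByCoreHours)
}
```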

View File

@@ -147,9 +147,12 @@ type ComplexityRoot struct {
    RunningJobs    func(childComplexity int) int
    ShortJobs      func(childComplexity int) int
    TotalAccHours  func(childComplexity int) int
+    TotalAccs      func(childComplexity int) int
    TotalCoreHours func(childComplexity int) int
+    TotalCores     func(childComplexity int) int
    TotalJobs      func(childComplexity int) int
    TotalNodeHours func(childComplexity int) int
+    TotalNodes     func(childComplexity int) int
    TotalWalltime  func(childComplexity int) int
  }
@@ -767,6 +770,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
    return e.complexity.JobsStatistics.TotalAccHours(childComplexity), true
+  case "JobsStatistics.totalAccs":
+    if e.complexity.JobsStatistics.TotalAccs == nil {
+      break
+    }
+    return e.complexity.JobsStatistics.TotalAccs(childComplexity), true
  case "JobsStatistics.totalCoreHours":
    if e.complexity.JobsStatistics.TotalCoreHours == nil {
      break
@@ -774,6 +784,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
    return e.complexity.JobsStatistics.TotalCoreHours(childComplexity), true
+  case "JobsStatistics.totalCores":
+    if e.complexity.JobsStatistics.TotalCores == nil {
+      break
+    }
+    return e.complexity.JobsStatistics.TotalCores(childComplexity), true
  case "JobsStatistics.totalJobs":
    if e.complexity.JobsStatistics.TotalJobs == nil {
      break
@@ -788,6 +805,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
    return e.complexity.JobsStatistics.TotalNodeHours(childComplexity), true
+  case "JobsStatistics.totalNodes":
+    if e.complexity.JobsStatistics.TotalNodes == nil {
+      break
+    }
+    return e.complexity.JobsStatistics.TotalNodes(childComplexity), true
  case "JobsStatistics.totalWalltime":
    if e.complexity.JobsStatistics.TotalWalltime == nil {
      break
@@ -1727,7 +1751,7 @@ type TimeWeights {
}
enum Aggregate { USER, PROJECT, CLUSTER }
-enum SortByAggregate { WALLTIME, NODEHOURS, COREHOURS, ACCHOURS }
+enum SortByAggregate { WALLTIME, TOTALNODES, NODEHOURS, TOTALCORES, COREHOURS, TOTALACCS, ACCHOURS }
type NodeMetrics {
  host: String!
@@ -1853,8 +1877,11 @@ type JobsStatistics {
  runningJobs: Int! # Number of running jobs
  shortJobs: Int! # Number of jobs with a duration of less than duration
  totalWalltime: Int! # Sum of the duration of all matched jobs in hours
+  totalNodes: Int! # Sum of the nodes of all matched jobs
  totalNodeHours: Int! # Sum of the node hours of all matched jobs
+  totalCores: Int! # Sum of the cores of all matched jobs
  totalCoreHours: Int! # Sum of the core hours of all matched jobs
+  totalAccs: Int! # Sum of the accs of all matched jobs
  totalAccHours: Int! # Sum of the gpu hours of all matched jobs
  histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
  histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
@@ -5131,6 +5158,50 @@ func (ec *executionContext) fieldContext_JobsStatistics_totalWalltime(ctx contex
  return fc, nil
}
+func (ec *executionContext) _JobsStatistics_totalNodes(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
+  fc, err := ec.fieldContext_JobsStatistics_totalNodes(ctx, field)
+  if err != nil {
+    return graphql.Null
+  }
+  ctx = graphql.WithFieldContext(ctx, fc)
+  defer func() {
+    if r := recover(); r != nil {
+      ec.Error(ctx, ec.Recover(ctx, r))
+      ret = graphql.Null
+    }
+  }()
+  resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+    ctx = rctx // use context from middleware stack in children
+    return obj.TotalNodes, nil
+  })
+  if err != nil {
+    ec.Error(ctx, err)
+    return graphql.Null
+  }
+  if resTmp == nil {
+    if !graphql.HasFieldError(ctx, fc) {
+      ec.Errorf(ctx, "must not be null")
+    }
+    return graphql.Null
+  }
+  res := resTmp.(int)
+  fc.Result = res
+  return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+func (ec *executionContext) fieldContext_JobsStatistics_totalNodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+  fc = &graphql.FieldContext{
+    Object:     "JobsStatistics",
+    Field:      field,
+    IsMethod:   false,
+    IsResolver: false,
+    Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+      return nil, errors.New("field of type Int does not have child fields")
+    },
+  }
+  return fc, nil
+}
func (ec *executionContext) _JobsStatistics_totalNodeHours(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
  fc, err := ec.fieldContext_JobsStatistics_totalNodeHours(ctx, field)
  if err != nil {
@@ -5175,6 +5246,50 @@ func (ec *executionContext) fieldContext_JobsStatistics_totalNodeHours(ctx conte
  return fc, nil
}
+func (ec *executionContext) _JobsStatistics_totalCores(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
+  fc, err := ec.fieldContext_JobsStatistics_totalCores(ctx, field)
+  if err != nil {
+    return graphql.Null
+  }
+  ctx = graphql.WithFieldContext(ctx, fc)
+  defer func() {
+    if r := recover(); r != nil {
+      ec.Error(ctx, ec.Recover(ctx, r))
+      ret = graphql.Null
+    }
+  }()
+  resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+    ctx = rctx // use context from middleware stack in children
+    return obj.TotalCores, nil
+  })
+  if err != nil {
+    ec.Error(ctx, err)
+    return graphql.Null
+  }
+  if resTmp == nil {
+    if !graphql.HasFieldError(ctx, fc) {
+      ec.Errorf(ctx, "must not be null")
+    }
+    return graphql.Null
+  }
+  res := resTmp.(int)
+  fc.Result = res
+  return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+func (ec *executionContext) fieldContext_JobsStatistics_totalCores(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+  fc = &graphql.FieldContext{
+    Object:     "JobsStatistics",
+    Field:      field,
+    IsMethod:   false,
+    IsResolver: false,
+    Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+      return nil, errors.New("field of type Int does not have child fields")
+    },
+  }
+  return fc, nil
+}
func (ec *executionContext) _JobsStatistics_totalCoreHours(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
  fc, err := ec.fieldContext_JobsStatistics_totalCoreHours(ctx, field)
  if err != nil {
@@ -5219,6 +5334,50 @@ func (ec *executionContext) fieldContext_JobsStatistics_totalCoreHours(ctx conte
  return fc, nil
}
+func (ec *executionContext) _JobsStatistics_totalAccs(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
+  fc, err := ec.fieldContext_JobsStatistics_totalAccs(ctx, field)
+  if err != nil {
+    return graphql.Null
+  }
+  ctx = graphql.WithFieldContext(ctx, fc)
+  defer func() {
+    if r := recover(); r != nil {
+      ec.Error(ctx, ec.Recover(ctx, r))
+      ret = graphql.Null
+    }
+  }()
+  resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+    ctx = rctx // use context from middleware stack in children
+    return obj.TotalAccs, nil
+  })
+  if err != nil {
+    ec.Error(ctx, err)
+    return graphql.Null
+  }
+  if resTmp == nil {
+    if !graphql.HasFieldError(ctx, fc) {
+      ec.Errorf(ctx, "must not be null")
+    }
+    return graphql.Null
+  }
+  res := resTmp.(int)
+  fc.Result = res
+  return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+func (ec *executionContext) fieldContext_JobsStatistics_totalAccs(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+  fc = &graphql.FieldContext{
+    Object:     "JobsStatistics",
+    Field:      field,
+    IsMethod:   false,
+    IsResolver: false,
+    Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+      return nil, errors.New("field of type Int does not have child fields")
+    },
+  }
+  return fc, nil
+}
func (ec *executionContext) _JobsStatistics_totalAccHours(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
  fc, err := ec.fieldContext_JobsStatistics_totalAccHours(ctx, field)
  if err != nil {
@@ -7134,10 +7293,16 @@ func (ec *executionContext) fieldContext_Query_jobsStatistics(ctx context.Contex
      return ec.fieldContext_JobsStatistics_shortJobs(ctx, field)
    case "totalWalltime":
      return ec.fieldContext_JobsStatistics_totalWalltime(ctx, field)
+    case "totalNodes":
+      return ec.fieldContext_JobsStatistics_totalNodes(ctx, field)
    case "totalNodeHours":
      return ec.fieldContext_JobsStatistics_totalNodeHours(ctx, field)
+    case "totalCores":
+      return ec.fieldContext_JobsStatistics_totalCores(ctx, field)
    case "totalCoreHours":
      return ec.fieldContext_JobsStatistics_totalCoreHours(ctx, field)
+    case "totalAccs":
+      return ec.fieldContext_JobsStatistics_totalAccs(ctx, field)
    case "totalAccHours":
      return ec.fieldContext_JobsStatistics_totalAccHours(ctx, field)
    case "histDuration":
@@ -12573,16 +12738,31 @@ func (ec *executionContext) _JobsStatistics(ctx context.Context, sel ast.Selecti
      if out.Values[i] == graphql.Null {
        out.Invalids++
      }
+    case "totalNodes":
+      out.Values[i] = ec._JobsStatistics_totalNodes(ctx, field, obj)
+      if out.Values[i] == graphql.Null {
+        out.Invalids++
+      }
    case "totalNodeHours":
      out.Values[i] = ec._JobsStatistics_totalNodeHours(ctx, field, obj)
      if out.Values[i] == graphql.Null {
        out.Invalids++
      }
+    case "totalCores":
+      out.Values[i] = ec._JobsStatistics_totalCores(ctx, field, obj)
+      if out.Values[i] == graphql.Null {
+        out.Invalids++
+      }
    case "totalCoreHours":
      out.Values[i] = ec._JobsStatistics_totalCoreHours(ctx, field, obj)
      if out.Values[i] == graphql.Null {
        out.Invalids++
      }
+    case "totalAccs":
+      out.Values[i] = ec._JobsStatistics_totalAccs(ctx, field, obj)
+      if out.Values[i] == graphql.Null {
+        out.Invalids++
+      }
    case "totalAccHours":
      out.Values[i] = ec._JobsStatistics_totalAccHours(ctx, field, obj)
      if out.Values[i] == graphql.Null {

View File

@@ -91,8 +91,11 @@ type JobsStatistics struct {
  RunningJobs    int           `json:"runningJobs"`
  ShortJobs      int           `json:"shortJobs"`
  TotalWalltime  int           `json:"totalWalltime"`
+  TotalNodes     int           `json:"totalNodes"`
  TotalNodeHours int           `json:"totalNodeHours"`
+  TotalCores     int           `json:"totalCores"`
  TotalCoreHours int           `json:"totalCoreHours"`
+  TotalAccs      int           `json:"totalAccs"`
  TotalAccHours  int           `json:"totalAccHours"`
  HistDuration   []*HistoPoint `json:"histDuration"`
  HistNumNodes   []*HistoPoint `json:"histNumNodes"`
@@ -192,21 +195,27 @@ type SortByAggregate string
const (
  SortByAggregateWalltime   SortByAggregate = "WALLTIME"
+  SortByAggregateTotalnodes SortByAggregate = "TOTALNODES"
  SortByAggregateNodehours  SortByAggregate = "NODEHOURS"
+  SortByAggregateTotalcores SortByAggregate = "TOTALCORES"
  SortByAggregateCorehours  SortByAggregate = "COREHOURS"
+  SortByAggregateTotalaccs  SortByAggregate = "TOTALACCS"
  SortByAggregateAcchours   SortByAggregate = "ACCHOURS"
)
var AllSortByAggregate = []SortByAggregate{
  SortByAggregateWalltime,
+  SortByAggregateTotalnodes,
  SortByAggregateNodehours,
+  SortByAggregateTotalcores,
  SortByAggregateCorehours,
+  SortByAggregateTotalaccs,
  SortByAggregateAcchours,
}
func (e SortByAggregate) IsValid() bool {
  switch e {
-  case SortByAggregateWalltime, SortByAggregateNodehours, SortByAggregateCorehours, SortByAggregateAcchours:
+  case SortByAggregateWalltime, SortByAggregateTotalnodes, SortByAggregateNodehours, SortByAggregateTotalcores, SortByAggregateCorehours, SortByAggregateTotalaccs, SortByAggregateAcchours:
    return true
  }
  return false

View File

@@ -25,8 +25,11 @@ var groupBy2column = map[model.Aggregate]string{
var sortBy2column = map[model.SortByAggregate]string{
  model.SortByAggregateWalltime:   "totalWalltime",
+  model.SortByAggregateTotalnodes: "totalNodes",
  model.SortByAggregateNodehours:  "totalNodeHours",
+  model.SortByAggregateTotalcores: "totalCores",
  model.SortByAggregateCorehours:  "totalCoreHours",
+  model.SortByAggregateTotalaccs:  "totalAccs",
  model.SortByAggregateAcchours:   "totalAccHours",
}
@@ -67,20 +70,26 @@ func (r *JobRepository) buildStatsQuery(
  castType := r.getCastType()
  if col != "" {
-    // Scan columns: id, totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
+    // Scan columns: id, totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
    query = sq.Select(col, "COUNT(job.id)",
      fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s) as totalWalltime", castType),
+      fmt.Sprintf("CAST(SUM(job.num_nodes) as %s) as totalNodes", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s) as totalNodeHours", castType),
+      fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s) as totalCores", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s) as totalCoreHours", castType),
+      fmt.Sprintf("CAST(SUM(job.num_acc) as %s) as totalAccs", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s) as totalAccHours", castType),
    ).From("job").GroupBy(col)
  } else {
-    // Scan columns: totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
+    // Scan columns: totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
    query = sq.Select("COUNT(job.id)",
      fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
+      fmt.Sprintf("CAST(SUM(job.num_nodes) as %s)", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
+      fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s)", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
+      fmt.Sprintf("CAST(SUM(job.num_acc) as %s)", castType),
      fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
    ).From("job")
  }
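
For orientation, the grouped branch above should yield SQL of roughly the following shape once squirrel renders it. This is hand-derived from the sq.Select calls, not copied from a query log; col is shown as job.user and the cast type as BIGINT purely for illustration.

```go
// Package name chosen only so this snippet compiles on its own.
package repository

// Approximate statement produced by buildStatsQuery when grouping by user;
// the concrete cast type comes from r.getCastType() and is shown as BIGINT.
const approxGroupedStatsSQL = `
SELECT job.user, COUNT(job.id),
  CAST(ROUND(SUM(job.duration) / 3600) as BIGINT) as totalWalltime,
  CAST(SUM(job.num_nodes) as BIGINT) as totalNodes,
  CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as BIGINT) as totalNodeHours,
  CAST(SUM(job.num_hwthreads) as BIGINT) as totalCores,
  CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as BIGINT) as totalCoreHours,
  CAST(SUM(job.num_acc) as BIGINT) as totalAccs,
  CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as BIGINT) as totalAccHours
FROM job
GROUP BY job.user`
```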
@@ -152,14 +161,21 @@ func (r *JobRepository) JobsStatsGrouped(
  for rows.Next() {
    var id sql.NullString
-    var jobs, walltime, nodeHours, coreHours, accHours sql.NullInt64
-    if err := rows.Scan(&id, &jobs, &walltime, &nodeHours, &coreHours, &accHours); err != nil {
+    var jobs, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64
+    if err := rows.Scan(&id, &jobs, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil {
      log.Warn("Error while scanning rows")
      return nil, err
    }
    if id.Valid {
-      var totalCoreHours, totalAccHours int
+      var totalCores, totalCoreHours, totalAccs, totalAccHours int
+      if cores.Valid {
+        totalCores = int(cores.Int64)
+      }
+      if accs.Valid {
+        totalAccs = int(accs.Int64)
+      }
      if coreHours.Valid {
        totalCoreHours = int(coreHours.Int64)
@@ -176,7 +192,9 @@ func (r *JobRepository) JobsStatsGrouped(
          Name:           name,
          TotalJobs:      int(jobs.Int64),
          TotalWalltime:  int(walltime.Int64),
+          TotalCores:     totalCores,
          TotalCoreHours: totalCoreHours,
+          TotalAccs:      totalAccs,
          TotalAccHours:  totalAccHours})
    } else {
      stats = append(stats,
@@ -184,7 +202,9 @@ func (r *JobRepository) JobsStatsGrouped(
          ID:             id.String,
          TotalJobs:      int(jobs.Int64),
          TotalWalltime:  int(walltime.Int64),
+          TotalCores:     totalCores,
          TotalCoreHours: totalCoreHours,
+          TotalAccs:      totalAccs,
          TotalAccHours:  totalAccHours})
    }
  }
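
The hunks above add the new aggregate columns and the sortBy2column lookup, but the code that applies the requested ordering and page window to this query sits outside the shown hunks. A minimal sketch of how that could be wired up with squirrel follows; the helper name applySortAndPaging is hypothetical, the import path is assumed from the repository layout, and the model.PageRequest field names are inferred from the GraphQL PageRequest input used by the frontend.

```go
package repository

import (
	"fmt"

	"github.com/ClusterCockpit/cc-backend/internal/graph/model" // import path assumed
	sq "github.com/Masterminds/squirrel"
)

// applySortAndPaging is a hypothetical helper building on the sortBy2column
// map above: it orders the statistics query by the resolved aggregate column
// (largest first, i.e. "top X") and narrows the result to the requested page.
func applySortAndPaging(query sq.SelectBuilder, sortBy *model.SortByAggregate, page *model.PageRequest) sq.SelectBuilder {
	if sortBy != nil {
		if col, ok := sortBy2column[*sortBy]; ok {
			query = query.OrderBy(fmt.Sprintf("%s DESC", col))
		}
	}
	if page != nil && page.ItemsPerPage > 0 && page.Page > 0 {
		query = query.
			Limit(uint64(page.ItemsPerPage)).
			Offset(uint64((page.Page - 1) * page.ItemsPerPage))
	}
	return query
}
```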

View File

@@ -66,13 +66,26 @@
        histDuration { count, value }
        histNumNodes { count, value }
      }
-      topUsers: jobsCount(filter: $jobFilters, groupBy: USER, weight: CORE_HOURS, limit: 5) { name, count }
    }
  `,
    variables: { jobFilters }
  })
+  const paging = { itemsPerPage: 5, page: 1 }; // Top 5
+  // const sorting = { field: "totalCoreHours", order: "DESC" };
+  $: topQuery = queryStore({
+    client: client,
+    query: gql`
+      query($jobFilters: [JobFilter!]!, $paging: PageRequest!) {
+        jobsStatistics(filter: $jobFilters, page: $paging, sortBy: TOTALCOREHOURS, groupBy: USER) {
+          id
+          totalCoreHours
+        }
+      }
+    `,
+    variables: { jobFilters, paging }
+  })
  $: footprintsQuery = queryStore({
    client: client,
    query: gql`
@@ -164,8 +177,8 @@
  <Pie
    size={colWidth1}
    sliceLabel='Core Hours'
-    quantities={$statsQuery.data.topUsers.sort((a, b) => b.count - a.count).map((tu) => tu.count)}
-    entities={$statsQuery.data.topUsers.sort((a, b) => b.count - a.count).map((tu) => tu.name)}
+    quantities={$topQuery.data.jobsStatistics.map((tu) => tu.totalCoreHours)}
+    entities={$topQuery.data.jobsStatistics.map((tu) => tu.id)}
  />
  {/key}
</div>
@@ -173,11 +186,11 @@
<Col>
  <Table>
    <tr class="mb-2"><th>Legend</th><th>User Name</th><th>Core Hours</th></tr>
-    {#each $statsQuery.data.topUsers.sort((a, b) => b.count - a.count) as { name, count }, i}
+    {#each $topQuery.data.jobsStatistics as { id, totalCoreHours }, i}
    <tr>
      <td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
-      <th scope="col"><a href="/monitoring/user/{name}?cluster={cluster.name}">{name}</a></th>
-      <td>{count}</td>
+      <th scope="col"><a href="/monitoring/user/{id}?cluster={cluster.name}">{id}</a></th>
+      <td>{totalCoreHours}</td>
    </tr>
    {/each}
  </Table>

View File

@@ -39,8 +39,6 @@
      }
      allocatedNodes(cluster: $cluster) { name, count }
-      topUsers: jobsCount(filter: $filter, groupBy: USER, weight: NODE_COUNT, limit: 10) { name, count }
-      topProjects: jobsCount(filter: $filter, groupBy: PROJECT, weight: NODE_COUNT, limit: 10) { name, count }
    }`,
    variables: {
      cluster: cluster, metrics: ['flops_any', 'mem_bw'], from: from.toISOString(), to: to.toISOString(),
@@ -48,6 +46,36 @@
    }
  })
+  const paging = { itemsPerPage: 10, page: 1 }; // Top 10
+  // const sorting = { field: "totalCores", order: "DESC" };
+  $: topUserQuery = queryStore({
+    client: client,
+    query: gql`
+      query($filter: [JobFilter!]!, $paging: PageRequest!) {
+        topUser: jobsStatistics(filter: $filter, page: $paging, sortBy: TOTALCORES, groupBy: USER) {
+          id
+          totalNodes
+          totalCores
+        }
+      }
+    `,
+    variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging }
+  })
+  $: topProjectQuery = queryStore({
+    client: client,
+    query: gql`
+      query($filter: [JobFilter!]!, $paging: PageRequest!) {
+        topProjects: jobsStatistics(filter: $filter, page: $paging, sortBy: TOTALCORES, groupBy: PROJECT) {
+          id
+          totalNodes
+          totalCores
+        }
+      }
+    `,
+    variables: { filter: [{ state: ['running'] }, { cluster: { eq: cluster } }], paging }
+  })
  const sumUp = (data, subcluster, metric) => data.reduce((sum, node) => node.subCluster == subcluster
    ? sum + (node.metrics.find(m => m.name == metric)?.metric.series.reduce((sum, series) => sum + series.data[series.data.length - 1], 0) || 0)
    : sum, 0)
@@ -161,48 +189,47 @@
<Col class="p-2">
  <div bind:clientWidth={colWidth1}>
    <h4 class="text-center">Top Users</h4>
-    {#key $mainQuery.data}
+    {#key $topUserQuery.data}
    <Pie
      size={colWidth1}
      sliceLabel='Jobs'
-      quantities={$mainQuery.data.topUsers.sort((a, b) => b.count - a.count).map((tu) => tu.count)}
-      entities={$mainQuery.data.topUsers.sort((a, b) => b.count - a.count).map((tu) => tu.name)}
+      quantities={$topUserQuery.data.topUser.map((tu) => tu.totalCores)}
+      entities={$topUserQuery.data.topUser.map((tu) => tu.id)}
    />
    {/key}
  </div>
</Col>
<Col class="px-4 py-2">
  <Table>
-    <tr class="mb-2"><th>Legend</th><th>User Name</th><th>Number of Nodes</th></tr>
-    {#each $mainQuery.data.topUsers.sort((a, b) => b.count - a.count) as { name, count }, i}
+    <tr class="mb-2"><th>Legend</th><th>User Name</th><th>Number of Cores</th></tr>
+    {#each $topUserQuery.data.topUser as { id, totalCores, totalNodes }, i}
    <tr>
      <td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
-      <th scope="col"><a href="/monitoring/user/{name}?cluster={cluster}&state=running">{name}</a></th>
-      <td>{count}</td>
+      <th scope="col"><a href="/monitoring/user/{id}?cluster={cluster}&state=running">{id}</a></th>
+      <td>{totalCores}</td>
    </tr>
    {/each}
  </Table>
</Col>
<Col class="p-2">
  <h4 class="text-center">Top Projects</h4>
-  {#key $mainQuery.data}
+  {#key $topProjectQuery.data}
  <Pie
    size={colWidth1}
    sliceLabel='Jobs'
-    quantities={$mainQuery.data.topProjects.sort((a, b) => b.count - a.count).map((tp) => tp.count)}
-    entities={$mainQuery.data.topProjects.sort((a, b) => b.count - a.count).map((tp) => tp.name)}
+    quantities={$topProjectQuery.data.topProjects.map((tp) => tp.totalCores)}
+    entities={$topProjectQuery.data.topProjects.map((tp) => tp.id)}
  />
  {/key}
</Col>
<Col class="px-4 py-2">
  <Table>
-    <tr class="mb-2"><th>Legend</th><th>Project Code</th><th>Number of Nodes</th></tr>
-    {#each $mainQuery.data.topProjects.sort((a, b) => b.count - a.count) as { name, count }, i}
+    <tr class="mb-2"><th>Legend</th><th>Project Code</th><th>Number of Cores</th></tr>
+    {#each $topProjectQuery.data.topProjects as { id, totalCores, totalNodes }, i}
    <tr>
      <td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
-      <th scope="col"><a href="/monitoring/jobs/?cluster={cluster}&state=running&project={name}&projectMatch=eq">{name}</a></th>
-      <td>{count}</td>
+      <th scope="col"><a href="/monitoring/jobs/?cluster={cluster}&state=running&project={id}&projectMatch=eq">{id}</a></th>
+      <td>{totalCores}</td>
    </tr>
    {/each}
  </Table>