Add additional job stats, fix test

Jan Eitzinger 2023-06-09 11:29:07 +02:00
parent 5ba6f0ed3a
commit 616095fe66
4 changed files with 104 additions and 19 deletions

View File

@@ -145,6 +145,7 @@ type ComplexityRoot struct {
         Name func(childComplexity int) int
         RunningJobs func(childComplexity int) int
         ShortJobs func(childComplexity int) int
+        TotalAccHours func(childComplexity int) int
         TotalCoreHours func(childComplexity int) int
         TotalJobs func(childComplexity int) int
         TotalNodeHours func(childComplexity int) int
@@ -747,6 +748,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
         return e.complexity.JobsStatistics.ShortJobs(childComplexity), true
+    case "JobsStatistics.totalAccHours":
+        if e.complexity.JobsStatistics.TotalAccHours == nil {
+            break
+        }
+        return e.complexity.JobsStatistics.TotalAccHours(childComplexity), true
     case "JobsStatistics.totalCoreHours":
         if e.complexity.JobsStatistics.TotalCoreHours == nil {
             break
@@ -1786,6 +1794,7 @@ type JobsStatistics {
   totalWalltime: Int! # Sum of the duration of all matched jobs in hours
   totalNodeHours: Int! # Sum of the node hours of all matched jobs
   totalCoreHours: Int! # Sum of the core hours of all matched jobs
+  totalAccHours: Int! # Sum of the gpu hours of all matched jobs
   histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
   histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
 }
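With the schema extended, the new field can be requested alongside the existing aggregates. The following is a minimal, hypothetical client sketch in Go using only the standard library; the http://localhost:8080/query endpoint, the missing authentication, and the assumption that the jobsStatistics arguments are optional are placeholders to adjust for a real deployment.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical endpoint; adjust host, path, and authentication for a real setup.
	const endpoint = "http://localhost:8080/query"

	// Request the new totalAccHours field next to the existing aggregates.
	body, _ := json.Marshal(map[string]string{
		"query": "{ jobsStatistics { totalJobs totalCoreHours totalAccHours } }",
	})

	resp, err := http.Post(endpoint, "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	var out map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("%+v\n", out)
}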
@@ -5122,6 +5131,50 @@ func (ec *executionContext) fieldContext_JobsStatistics_totalCoreHours(ctx conte
     return fc, nil
 }
+func (ec *executionContext) _JobsStatistics_totalAccHours(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_JobsStatistics_totalAccHours(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.TotalAccHours, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_JobsStatistics_totalAccHours(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object: "JobsStatistics",
+        Field: field,
+        IsMethod: false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
 func (ec *executionContext) _JobsStatistics_histDuration(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
     fc, err := ec.fieldContext_JobsStatistics_histDuration(ctx, field)
     if err != nil {
@@ -6983,6 +7036,8 @@ func (ec *executionContext) fieldContext_Query_jobsStatistics(ctx context.Contex
                 return ec.fieldContext_JobsStatistics_totalNodeHours(ctx, field)
             case "totalCoreHours":
                 return ec.fieldContext_JobsStatistics_totalCoreHours(ctx, field)
+            case "totalAccHours":
+                return ec.fieldContext_JobsStatistics_totalAccHours(ctx, field)
             case "histDuration":
                 return ec.fieldContext_JobsStatistics_histDuration(ctx, field)
             case "histNumNodes":
@@ -12207,6 +12262,13 @@ func (ec *executionContext) _JobsStatistics(ctx context.Context, sel ast.Selecti
             out.Values[i] = ec._JobsStatistics_totalCoreHours(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                invalids++
+            }
+        case "totalAccHours":
+            out.Values[i] = ec._JobsStatistics_totalAccHours(ctx, field, obj)
             if out.Values[i] == graphql.Null {
                 invalids++
             }

View File

@@ -95,6 +95,7 @@ type JobsStatistics struct {
     TotalWalltime int `json:"totalWalltime"`
     TotalNodeHours int `json:"totalNodeHours"`
     TotalCoreHours int `json:"totalCoreHours"`
+    TotalAccHours int `json:"totalAccHours"`
     HistDuration []*HistoPoint `json:"histDuration"`
     HistNumNodes []*HistoPoint `json:"histNumNodes"`
 }
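The generated model maps straight onto the GraphQL response, so JSON consumers only see one extra key. A small, self-contained sketch of the resulting JSON shape, using a trimmed-down local copy of the struct instead of the real generated model package:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down local copy of the generated JobsStatistics model, only to
// illustrate the JSON shape; the real type lives in the generated model package.
type jobsStatistics struct {
	TotalWalltime  int `json:"totalWalltime"`
	TotalNodeHours int `json:"totalNodeHours"`
	TotalCoreHours int `json:"totalCoreHours"`
	TotalAccHours  int `json:"totalAccHours"`
}

func main() {
	// Example numbers, not real data.
	s := jobsStatistics{
		TotalWalltime:  12,
		TotalNodeHours: 48,
		TotalCoreHours: 960,
		TotalAccHours:  96,
	}
	out, _ := json.Marshal(s)
	fmt.Println(string(out))
	// {"totalWalltime":12,"totalNodeHours":48,"totalCoreHours":960,"totalAccHours":96}
}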

View File

@@ -61,18 +61,20 @@ func (r *JobRepository) buildStatsQuery(
     castType := r.getCastType()
     if col != "" {
-        // Scan columns: id, totalJobs, totalWalltime, totalNodeHours, totalCoreHours
+        // Scan columns: id, totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
         query = sq.Select(col, "COUNT(job.id)",
             fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
             fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
             fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
+            fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
         ).From("job").GroupBy(col)
     } else {
-        // Scan columns: totalJobs, totalWalltime, totalNodeHours, totalCoreHours
+        // Scan columns: totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
         query = sq.Select("COUNT(job.id)",
             fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
             fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
             fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
+            fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
         ).From("job")
     }
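The accelerator-hours aggregate follows the same pattern as node and core hours: duration in seconds times the per-job accelerator count, summed and divided by 3600, so a 7200 s job on 4 accelerators contributes 8 accelerator hours. Below is a standalone sketch of the grouped variant, assuming the sq alias refers to github.com/Masterminds/squirrel and substituting a placeholder cast type for r.getCastType(); the real method also applies the job filters.

package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

func main() {
	// Placeholder; the repository derives the real cast type per database driver.
	castType := "INTEGER"

	// Grouped statistics query with the new accelerator-hours column appended.
	query := sq.Select("job.user", "COUNT(job.id)",
		fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
		fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
		fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
		fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
	).From("job").GroupBy("job.user")

	sql, args, err := query.ToSql()
	if err != nil {
		panic(err)
	}
	fmt.Println(sql)  // SELECT job.user, COUNT(job.id), ... FROM job GROUP BY job.user
	fmt.Println(args) // no bound arguments for this plain aggregate
}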
@@ -131,13 +133,22 @@ func (r *JobRepository) JobsStatsGrouped(
     for rows.Next() {
         var id sql.NullString
-        var jobs, walltime, nodeHours, coreHours sql.NullInt64
-        if err := rows.Scan(&id, &jobs, &walltime, &nodeHours, &coreHours); err != nil {
+        var jobs, walltime, nodeHours, coreHours, accHours sql.NullInt64
+        if err := rows.Scan(&id, &jobs, &walltime, &nodeHours, &coreHours, &accHours); err != nil {
             log.Warn("Error while scanning rows")
             return nil, err
         }
         if id.Valid {
+            var totalCoreHours, totalAccHours int
+            if coreHours.Valid {
+                totalCoreHours = int(coreHours.Int64)
+            }
+            if accHours.Valid {
+                totalAccHours = int(accHours.Int64)
+            }
             if col == "job.user" {
                 name := r.getUserName(ctx, id.String)
                 stats = append(stats,
@@ -145,13 +156,17 @@ func (r *JobRepository) JobsStatsGrouped(
                         ID: id.String,
                         Name: name,
                         TotalJobs: int(jobs.Int64),
-                        TotalWalltime: int(walltime.Int64)})
+                        TotalWalltime: int(walltime.Int64),
+                        TotalCoreHours: totalCoreHours,
+                        TotalAccHours: totalAccHours})
             } else {
                 stats = append(stats,
                     &model.JobsStatistics{
                         ID: id.String,
                         TotalJobs: int(jobs.Int64),
-                        TotalWalltime: int(walltime.Int64)})
+                        TotalWalltime: int(walltime.Int64),
+                        TotalCoreHours: totalCoreHours,
+                        TotalAccHours: totalAccHours})
             }
         }
     }
@@ -174,17 +189,27 @@ func (r *JobRepository) JobsStats(
     row := query.RunWith(r.DB).QueryRow()
     stats := make([]*model.JobsStatistics, 0, 1)
-    var jobs, walltime, nodeHours, coreHours sql.NullInt64
-    if err := row.Scan(&jobs, &walltime, &nodeHours, &coreHours); err != nil {
+    var jobs, walltime, nodeHours, coreHours, accHours sql.NullInt64
+    if err := row.Scan(&jobs, &walltime, &nodeHours, &coreHours, &accHours); err != nil {
         log.Warn("Error while scanning rows")
         return nil, err
     }
     if jobs.Valid {
+        var totalCoreHours, totalAccHours int
+        if coreHours.Valid {
+            totalCoreHours = int(coreHours.Int64)
+        }
+        if accHours.Valid {
+            totalAccHours = int(accHours.Int64)
+        }
         stats = append(stats,
             &model.JobsStatistics{
                 TotalJobs: int(jobs.Int64),
-                TotalWalltime: int(walltime.Int64)})
+                TotalWalltime: int(walltime.Int64),
+                TotalCoreHours: totalCoreHours,
+                TotalAccHours: totalAccHours})
     }
     log.Infof("Timer JobStatistics %s", time.Since(start))

View File

@@ -11,14 +11,11 @@ import (
 func TestBuildJobStatsQuery(t *testing.T) {
     r := setup(t)
-    q := r.buildJobsStatsQuery(nil, "USER")
+    q := r.buildStatsQuery(nil, "USER")
     sql, _, err := q.ToSql()
     noErr(t, err)
     fmt.Printf("SQL: %s\n", sql)
-    if 1 != 5 {
-        t.Errorf("wrong summary for diagnostic 3\ngot: %d \nwant: 1366", 5)
-    }
 }
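The removed block compared two constants and could never report anything meaningful. If the test should assert more than error-free SQL generation, a hypothetical follow-up check (not part of this commit) could verify that the new accelerator-hours aggregate actually appears in the generated statement, reusing the setup and noErr helpers shown above:

package repository

import (
	"strings"
	"testing"
)

// Hypothetical additional test, assuming it sits next to TestBuildJobStatsQuery
// in the same package and can reuse its setup(t) and noErr(t, err) helpers.
func TestBuildJobStatsQueryHasAccHours(t *testing.T) {
	r := setup(t)
	q := r.buildStatsQuery(nil, "USER")

	sql, _, err := q.ToSql()
	noErr(t, err)

	if !strings.Contains(sql, "job.num_acc") {
		t.Errorf("expected accelerator-hours aggregate in generated SQL, got: %s", sql)
	}
}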