Add numHWThreads/numAccelerators filter

Author: Lou Knauer
Date:   2022-01-27 12:32:28 +01:00
Parent: 9e92d6076f
Commit: eca7ad0de0
5 changed files with 50 additions and 18 deletions

@@ -1 +1 @@
-Subproject commit 80650220a3d481b6fc82c305791073ed92fc9261
+Subproject commit c04f69f8fbcb7a25a8c2f4d0b962bff2e35a8478


@@ -1352,7 +1352,11 @@ input JobFilter {
cluster: StringInput
partition: StringInput
duration: IntRange
-numNodes: IntRange
+numNodes: IntRange
+numAccelerators: IntRange
+numHWThreads: IntRange
startTime: TimeRange
state: [JobState!]
flopsAnyAvg: FloatRange
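
On the API side the two new fields slot straight into the existing filter input. The snippet below is a hypothetical client query, not part of this commit: the jobs() query, its list-valued filter argument and the items/count result fields are assumptions taken from the surrounding cc-backend schema, and IntRange is assumed to take from/to integers.

    // exampleJobsQuery sketches a client query that exercises the new filter
    // fields. Only numAccelerators and numHWThreads are introduced by this
    // commit; everything else here is an assumption about the existing API.
    const exampleJobsQuery = `
    query {
      jobs(filter: [{
        numHWThreads:    { from: 64, to: 128 }
        numAccelerators: { from: 2, to: 4 }
      }]) {
        items { jobId numNodes }
        count
      }
    }`
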
@@ -7071,6 +7075,22 @@ func (ec *executionContext) unmarshalInputJobFilter(ctx context.Context, obj int
if err != nil {
return it, err
}
+case "numAccelerators":
+var err error
+ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("numAccelerators"))
+it.NumAccelerators, err = ec.unmarshalOIntRange2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋgraphᚋmodelᚐIntRange(ctx, v)
+if err != nil {
+return it, err
+}
+case "numHWThreads":
+var err error
+ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("numHWThreads"))
+it.NumHWThreads, err = ec.unmarshalOIntRange2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋgraphᚋmodelᚐIntRange(ctx, v)
+if err != nil {
+return it, err
+}
case "startTime":
var err error
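
The generated cases mirror the other range filters: gqlgen walks the incoming argument map key by key and converts each entry into the matching field of the model. A rough sketch of that round trip for the two new keys follows; the model import path is github.com/ClusterCockpit/cc-backend/graph/model, and the plain from/to integer fields of IntRange are an assumption (see the models further down):

    // Sketch only: the untyped shape of a filter argument containing the new
    // keys, roughly as the generated unmarshaller receives it ...
    raw := map[string]interface{}{
        "numHWThreads":    map[string]interface{}{"from": 64, "to": 128},
        "numAccelerators": map[string]interface{}{"from": 2, "to": 4},
    }

    // ... and the typed value the two new cases are expected to produce.
    filter := model.JobFilter{
        NumHWThreads:    &model.IntRange{From: 64, To: 128},
        NumAccelerators: &model.IntRange{From: 2, To: 4},
    }
    _, _ = raw, filter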


@@ -44,21 +44,23 @@ type IntRangeOutput struct {
}
type JobFilter struct {
-Tags []string `json:"tags"`
-JobID *StringInput `json:"jobId"`
-ArrayJobID *int `json:"arrayJobId"`
-User *StringInput `json:"user"`
-Project *StringInput `json:"project"`
-Cluster *StringInput `json:"cluster"`
-Partition *StringInput `json:"partition"`
-Duration *IntRange `json:"duration"`
-NumNodes *IntRange `json:"numNodes"`
-StartTime *TimeRange `json:"startTime"`
-State []schema.JobState `json:"state"`
-FlopsAnyAvg *FloatRange `json:"flopsAnyAvg"`
-MemBwAvg *FloatRange `json:"memBwAvg"`
-LoadAvg *FloatRange `json:"loadAvg"`
-MemUsedMax *FloatRange `json:"memUsedMax"`
+Tags []string `json:"tags"`
+JobID *StringInput `json:"jobId"`
+ArrayJobID *int `json:"arrayJobId"`
+User *StringInput `json:"user"`
+Project *StringInput `json:"project"`
+Cluster *StringInput `json:"cluster"`
+Partition *StringInput `json:"partition"`
+Duration *IntRange `json:"duration"`
+NumNodes *IntRange `json:"numNodes"`
+NumAccelerators *IntRange `json:"numAccelerators"`
+NumHWThreads *IntRange `json:"numHWThreads"`
+StartTime *TimeRange `json:"startTime"`
+State []schema.JobState `json:"state"`
+FlopsAnyAvg *FloatRange `json:"flopsAnyAvg"`
+MemBwAvg *FloatRange `json:"memBwAvg"`
+LoadAvg *FloatRange `json:"loadAvg"`
+MemUsedMax *FloatRange `json:"memUsedMax"`
}
type JobMetricWithName struct {
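
This file is generated by gqlgen, and the two longer field names shift the gofmt alignment, which is most likely why the whole struct block shows up as rewritten rather than just two added lines. The IntRange model the new pointer fields reference is not part of this hunk; assuming it is generated from input IntRange { from: Int!, to: Int! }, it looks roughly like this:

    // Not part of this hunk: the IntRange model referenced by NumNodes,
    // NumAccelerators and NumHWThreads. Exact field types are an assumption
    // based on gqlgen's usual output for non-nullable Int fields.
    type IntRange struct {
        From int `json:"from"`
        To   int `json:"to"`
    }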


@@ -156,6 +156,12 @@ func buildWhereClause(filter *model.JobFilter, query sq.SelectBuilder) sq.Select
if filter.NumNodes != nil {
query = buildIntCondition("job.num_nodes", filter.NumNodes, query)
}
+if filter.NumAccelerators != nil {
+query = buildIntCondition("job.num_acc", filter.NumAccelerators, query)
+}
+if filter.NumHWThreads != nil {
+query = buildIntCondition("job.num_hwthreads", filter.NumHWThreads, query)
+}
if filter.FlopsAnyAvg != nil {
query = buildFloatCondition("job.flops_any_avg", filter.FlopsAnyAvg, query)
}
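
buildIntCondition itself is not shown in this hunk. Going by how it is called here, a minimal sketch of the helper the two new branches reuse could look like the following; treat it as an assumption about the existing code, not its verbatim source:

    // Narrows the squirrel SelectBuilder with an inclusive BETWEEN on the
    // given column, using the from/to bounds of the filter range.
    func buildIntCondition(field string, cond *model.IntRange, query sq.SelectBuilder) sq.SelectBuilder {
        return query.Where(field+" BETWEEN ? AND ?", cond.From, cond.To)
    }

Since BETWEEN is inclusive on both ends, a filter of from: 64, to: 128 also matches jobs that used exactly 64 or exactly 128 hardware threads.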


@@ -174,7 +174,11 @@ input JobFilter {
cluster: StringInput
partition: StringInput
duration: IntRange
-numNodes: IntRange
+numNodes: IntRange
+numAccelerators: IntRange
+numHWThreads: IntRange
startTime: TimeRange
state: [JobState!]
flopsAnyAvg: FloatRange
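
Taken together, a filter carrying the two new ranges should end up as two extra BETWEEN predicates on the job table. The following self-contained snippet only illustrates that expected output, built with squirrel directly; the column names are taken from the buildWhereClause hunk above, while the selected column and the bound values are made up for the example:

    package main

    import (
        "fmt"

        sq "github.com/Masterminds/squirrel"
    )

    func main() {
        // Mirror what buildWhereClause is expected to produce for
        // numHWThreads: {from: 64, to: 128} and numAccelerators: {from: 2, to: 4}.
        query := sq.Select("job.id").From("job").
            Where("job.num_hwthreads BETWEEN ? AND ?", 64, 128).
            Where("job.num_acc BETWEEN ? AND ?", 2, 4)

        sql, args, err := query.ToSql()
        if err != nil {
            panic(err)
        }
        fmt.Println(sql)
        // SELECT job.id FROM job WHERE job.num_hwthreads BETWEEN ? AND ? AND job.num_acc BETWEEN ? AND ?
        fmt.Println(args) // [64 128 2 4]
    }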