Add tags field to Job

Lou Knauer 2021-04-21 10:12:19 +02:00
parent 499d2cdc22
commit 3004e2909a
6 changed files with 232 additions and 185 deletions

View File

@@ -56,6 +56,9 @@ models:
- github.com/99designs/gqlgen/graphql.Int32
Job:
model: "github.com/ClusterCockpit/cc-jobarchive/graph/model.Job"
fields:
tags:
resolver: true
JobTag:
model: "github.com/ClusterCockpit/cc-jobarchive/graph/model.JobTag"
Timestamp:
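
For context (an explanatory note, not part of the commit): setting `resolver: true` on the `tags` field tells gqlgen not to read the value from the Job struct but to generate a per-field resolver interface that the application has to implement. The regenerated interface, shown verbatim in the generated code further down, is:

```go
// Excerpt of the regenerated code: with resolver: true, gqlgen requires the
// application to supply this method (implemented in the resolver file below).
type JobResolver interface {
	Tags(ctx context.Context, obj *model.Job) ([]*model.JobTag, error)
}
```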

View File

@@ -36,6 +36,7 @@ type Config struct {
}
type ResolverRoot interface {
Job() JobResolver
Query() QueryResolver
}
@@ -62,6 +63,7 @@ type ComplexityRoot struct {
NumNodes func(childComplexity int) int
ProjectID func(childComplexity int) int
StartTime func(childComplexity int) int
Tags func(childComplexity int) int
UserID func(childComplexity int) int
}
@@ -114,20 +116,21 @@
Query struct {
JobByID func(childComplexity int, jobID string) int
JobMetrics func(childComplexity int, jobID string, metrics []*string) int
JobTags func(childComplexity int, jobID *string) int
Jobs func(childComplexity int, filter *model.JobFilterList, page *model.PageRequest, order *model.OrderByInput) int
JobsByTag func(childComplexity int, tag string) int
JobsStatistics func(childComplexity int, filter *model.JobFilterList) int
Tags func(childComplexity int, jobID *string) int
}
}
type JobResolver interface {
Tags(ctx context.Context, obj *model.Job) ([]*model.JobTag, error)
}
type QueryResolver interface {
JobByID(ctx context.Context, jobID string) (*model.Job, error)
Jobs(ctx context.Context, filter *model.JobFilterList, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error)
JobsStatistics(ctx context.Context, filter *model.JobFilterList) (*model.JobsStatistics, error)
JobMetrics(ctx context.Context, jobID string, metrics []*string) ([]*model.JobMetricWithName, error)
JobTags(ctx context.Context, jobID *string) ([]*model.JobTag, error)
Tags(ctx context.Context, jobID *string) ([]*model.JobTag, error)
JobsByTag(ctx context.Context, tag string) ([]string, error)
}
type executableSchema struct {
@@ -250,6 +253,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Job.StartTime(childComplexity), true
case "Job.tags":
if e.complexity.Job.Tags == nil {
break
}
return e.complexity.Job.Tags(childComplexity), true
case "Job.userId": case "Job.userId":
if e.complexity.Job.UserID == nil { if e.complexity.Job.UserID == nil {
break break
@ -456,18 +466,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.JobMetrics(childComplexity, args["jobId"].(string), args["metrics"].([]*string)), true return e.complexity.Query.JobMetrics(childComplexity, args["jobId"].(string), args["metrics"].([]*string)), true
case "Query.jobTags":
if e.complexity.Query.JobTags == nil {
break
}
args, err := ec.field_Query_jobTags_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.JobTags(childComplexity, args["jobId"].(*string)), true
case "Query.jobs": case "Query.jobs":
if e.complexity.Query.Jobs == nil { if e.complexity.Query.Jobs == nil {
break break
@ -480,18 +478,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.Jobs(childComplexity, args["filter"].(*model.JobFilterList), args["page"].(*model.PageRequest), args["order"].(*model.OrderByInput)), true return e.complexity.Query.Jobs(childComplexity, args["filter"].(*model.JobFilterList), args["page"].(*model.PageRequest), args["order"].(*model.OrderByInput)), true
case "Query.jobsByTag":
if e.complexity.Query.JobsByTag == nil {
break
}
args, err := ec.field_Query_jobsByTag_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.JobsByTag(childComplexity, args["tag"].(string)), true
case "Query.jobsStatistics": case "Query.jobsStatistics":
if e.complexity.Query.JobsStatistics == nil { if e.complexity.Query.JobsStatistics == nil {
break break
@ -504,6 +490,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].(*model.JobFilterList)), true return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].(*model.JobFilterList)), true
case "Query.tags":
if e.complexity.Query.Tags == nil {
break
}
args, err := ec.field_Query_tags_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.Tags(childComplexity, args["jobId"].(*string)), true
}
return 0, false
}
@@ -571,6 +569,8 @@ var sources = []*ast.Source{
memBw_avg: Float
netBw_avg: Float
fileBw_avg: Float
tags: [JobTag!]
}
type JobMetric {
@@ -611,10 +611,7 @@ type Query {
jobMetrics(jobId: String!, metrics: [String]): [JobMetricWithName]!
# Return all known tags or, if jobId is specified, only tags from this job
jobTags(jobId: String): [JobTag!]!
tags(jobId: String): [JobTag!]!
# For a tag ID, return the ID's of all jobs with that tag
jobsByTag(tag: ID!): [ID!]!
}
input StartJobInput {
@@ -646,6 +643,8 @@ input JobFilterList {
}
input JobFilter {
tagName: String
tagType: String
jobId: StringInput
userId: StringInput
projectId: StringInput
@@ -782,36 +781,6 @@ func (ec *executionContext) field_Query_jobMetrics_args(ctx context.Context, raw
return args, nil
}
func (ec *executionContext) field_Query_jobTags_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 *string
if tmp, ok := rawArgs["jobId"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("jobId"))
arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
if err != nil {
return nil, err
}
}
args["jobId"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_jobsByTag_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 string
if tmp, ok := rawArgs["tag"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tag"))
arg0, err = ec.unmarshalNID2string(ctx, tmp)
if err != nil {
return nil, err
}
}
args["tag"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_jobsStatistics_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@@ -860,6 +829,21 @@ func (ec *executionContext) field_Query_jobs_args(ctx context.Context, rawArgs m
return args, nil
}
func (ec *executionContext) field_Query_tags_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 *string
if tmp, ok := rawArgs["jobId"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("jobId"))
arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
if err != nil {
return nil, err
}
}
args["jobId"] = arg0
return args, nil
}
func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@@ -1443,6 +1427,38 @@ func (ec *executionContext) _Job_fileBw_avg(ctx context.Context, field graphql.C
return ec.marshalOFloat2ᚖfloat64(ctx, field.Selections, res)
}
func (ec *executionContext) _Job_tags(ctx context.Context, field graphql.CollectedField, obj *model.Job) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Job",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Job().Tags(rctx, obj)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([]*model.JobTag)
fc.Result = res
return ec.marshalOJobTag2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐJobTagᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _JobMetric_unit(ctx context.Context, field graphql.CollectedField, obj *model.JobMetric) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@@ -2471,7 +2487,7 @@ func (ec *executionContext) _Query_jobMetrics(ctx context.Context, field graphql
return ec.marshalNJobMetricWithName2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐJobMetricWithName(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_jobTags(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
func (ec *executionContext) _Query_tags(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
@@ -2488,7 +2504,7 @@ func (ec *executionContext) _Query_jobTags(ctx context.Context, field graphql.Co
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_jobTags_args(ctx, rawArgs)
args, err := ec.field_Query_tags_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
@@ -2496,7 +2512,7 @@ func (ec *executionContext) _Query_jobTags(ctx context.Context, field graphql.Co
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().JobTags(rctx, args["jobId"].(*string))
return ec.resolvers.Query().Tags(rctx, args["jobId"].(*string))
})
if err != nil {
ec.Error(ctx, err)
@@ -2513,48 +2529,6 @@ func (ec *executionContext) _Query_jobTags(ctx context.Context, field graphql.Co
return ec.marshalNJobTag2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐJobTagᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_jobsByTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_jobsByTag_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().JobsByTag(rctx, args["tag"].(string))
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]string)
fc.Result = res
return ec.marshalNID2ᚕstringᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@@ -3843,6 +3817,22 @@ func (ec *executionContext) unmarshalInputJobFilter(ctx context.Context, obj int
for k, v := range asMap {
switch k {
case "tagName":
var err error
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagName"))
it.TagName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "tagType":
var err error
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagType"))
it.TagType, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "jobId": case "jobId":
var err error var err error
@ -4199,47 +4189,47 @@ func (ec *executionContext) _Job(ctx context.Context, sel ast.SelectionSet, obj
case "id": case "id":
out.Values[i] = ec._Job_id(ctx, field, obj) out.Values[i] = ec._Job_id(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "jobId": case "jobId":
out.Values[i] = ec._Job_jobId(ctx, field, obj) out.Values[i] = ec._Job_jobId(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "userId": case "userId":
out.Values[i] = ec._Job_userId(ctx, field, obj) out.Values[i] = ec._Job_userId(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "projectId": case "projectId":
out.Values[i] = ec._Job_projectId(ctx, field, obj) out.Values[i] = ec._Job_projectId(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "clusterId": case "clusterId":
out.Values[i] = ec._Job_clusterId(ctx, field, obj) out.Values[i] = ec._Job_clusterId(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "startTime": case "startTime":
out.Values[i] = ec._Job_startTime(ctx, field, obj) out.Values[i] = ec._Job_startTime(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "duration": case "duration":
out.Values[i] = ec._Job_duration(ctx, field, obj) out.Values[i] = ec._Job_duration(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "numNodes": case "numNodes":
out.Values[i] = ec._Job_numNodes(ctx, field, obj) out.Values[i] = ec._Job_numNodes(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "hasProfile": case "hasProfile":
out.Values[i] = ec._Job_hasProfile(ctx, field, obj) out.Values[i] = ec._Job_hasProfile(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
invalids++ atomic.AddUint32(&invalids, 1)
} }
case "memUsed_max": case "memUsed_max":
out.Values[i] = ec._Job_memUsed_max(ctx, field, obj) out.Values[i] = ec._Job_memUsed_max(ctx, field, obj)
@ -4251,6 +4241,17 @@ func (ec *executionContext) _Job(ctx context.Context, sel ast.SelectionSet, obj
out.Values[i] = ec._Job_netBw_avg(ctx, field, obj) out.Values[i] = ec._Job_netBw_avg(ctx, field, obj)
case "fileBw_avg": case "fileBw_avg":
out.Values[i] = ec._Job_fileBw_avg(ctx, field, obj) out.Values[i] = ec._Job_fileBw_avg(ctx, field, obj)
case "tags":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Job_tags(ctx, field, obj)
return res
})
default:
panic("unknown field " + strconv.Quote(field.Name))
}
@@ -4597,7 +4598,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
}
return res
})
case "jobTags":
case "tags":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
@@ -4605,21 +4606,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Query_jobTags(ctx, field)
res = ec._Query_tags(ctx, field)
if res == graphql.Null {
atomic.AddUint32(&invalids, 1)
}
return res
})
case "jobsByTag":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Query_jobsByTag(ctx, field)
if res == graphql.Null {
atomic.AddUint32(&invalids, 1)
}
@@ -4997,36 +4984,6 @@ func (ec *executionContext) marshalNID2string(ctx context.Context, sel ast.Selec
return res
}
func (ec *executionContext) unmarshalNID2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) {
var vSlice []interface{}
if v != nil {
if tmp1, ok := v.([]interface{}); ok {
vSlice = tmp1
} else {
vSlice = []interface{}{v}
}
}
var err error
res := make([]string, len(vSlice))
for i := range vSlice {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i))
res[i], err = ec.unmarshalNID2string(ctx, vSlice[i])
if err != nil {
return nil, err
}
}
return res, nil
}
func (ec *executionContext) marshalNID2ᚕstringᚄ(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler {
ret := make(graphql.Array, len(v))
for i := range v {
ret[i] = ec.marshalNID2string(ctx, sel, v[i])
}
return ret
}
func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) {
res, err := graphql.UnmarshalInt(v)
return res, graphql.ErrorOnPath(ctx, err)
@@ -5644,6 +5601,46 @@ func (ec *executionContext) marshalOJobMetricWithName2ᚖgithubᚗcomᚋClusterC
return ec._JobMetricWithName(ctx, sel, v)
}
func (ec *executionContext) marshalOJobTag2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐJobTagᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.JobTag) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalNJobTag2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐJobTag(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
return ret
}
func (ec *executionContext) unmarshalOOrderByInput2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐOrderByInput(ctx context.Context, v interface{}) (*model.OrderByInput, error) {
if v == nil {
return nil, nil

View File

@@ -22,6 +22,7 @@ type Job struct {
MemBw_avg *float64 `json:"memBwAvg" db:"mem_bw_avg"`
NetBw_avg *float64 `json:"netBwAvg" db:"net_bw_avg"`
FileBw_avg *float64 `json:"fileBwAvg" db:"file_bw_avg"`
Tags []JobTag `json:"tags"`
}
type JobTag struct {

View File

@@ -35,6 +35,8 @@ type IntRange struct {
}
type JobFilter struct {
TagName *string `json:"tagName"`
TagType *string `json:"tagType"`
JobID *StringInput `json:"jobId"`
UserID *StringInput `json:"userId"`
ProjectID *StringInput `json:"projectId"`

View File

@@ -7,7 +7,6 @@ import (
"log"
"strings"
"os"
"errors"
"strconv" "strconv"
"encoding/json" "encoding/json"
@ -58,12 +57,26 @@ func addTimeCondition(conditions []string, field string, input *model.TimeRange)
return conditions return conditions
} }
func buildQueryConditions(filterList *model.JobFilterList) string { func buildQueryConditions(filterList *model.JobFilterList) (string, string) {
var conditions []string var conditions []string
var join string
joinJobtags := `
JOIN jobtag ON jobtag.job_id = job.id
JOIN tag ON tag.id = jobtag.tag_id
`
for _, condition := range filterList.List {
if condition.TagName != nil {
conditions = append(conditions, fmt.Sprintf("tag.tag_name = '%s'", *condition.TagName))
join = joinJobtags
}
if condition.TagType != nil {
conditions = append(conditions, fmt.Sprintf("tag.tag_type = '%s'", *condition.TagType))
join = joinJobtags
}
if condition.JobID != nil {
conditions = addStringCondition(conditions, `job_id`, condition.JobID)
conditions = addStringCondition(conditions, `job.job_id`, condition.JobID)
}
if condition.UserID != nil {
conditions = addStringCondition(conditions, `user_id`, condition.UserID)
@@ -85,7 +98,7 @@ func buildQueryConditions(filterList *model.JobFilterList) string {
}
}
return strings.Join(conditions, " AND ")
return strings.Join(conditions, " AND "), join
}
func readJobDataFile(jobId string) ([]byte, error) {
@@ -141,7 +154,7 @@ func (r *queryResolver) Jobs(
var jobs []*model.Job
var limit, offset int
var qc, ob string
var qc, ob, jo string
if page != nil {
limit = *page.ItemsPerPage
@@ -152,18 +165,22 @@
}
if filterList != nil {
qc = buildQueryConditions(filterList)
qc, jo = buildQueryConditions(filterList)
if qc != "" {
qc = `WHERE ` + qc
}
if jo != "" {
qc = jo + qc
}
}
if orderBy != nil {
ob = fmt.Sprintf("ORDER BY %s %s", orderBy.Field, *orderBy.Order)
}
qstr := `SELECT * `
qstr := `SELECT job.* `
qstr += fmt.Sprintf("FROM job %s %s LIMIT %d OFFSET %d", qc, ob, limit, offset)
log.Printf("%s", qstr)
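
To illustrate how the new join handling composes (the filter value, limit, and offset below are made-up examples, not from the commit): with a single `tagName` filter, `buildQueryConditions` returns the tag condition plus the jobtag/tag joins, and the code above assembles roughly the following statement.

```go
package main

import "fmt"

func main() {
	// Hypothetical filter: one JobFilter with TagName = "slow-io".
	// buildQueryConditions then returns
	//   conditions: tag.tag_name = 'slow-io'
	//   join:       JOIN jobtag ON jobtag.job_id = job.id
	//               JOIN tag ON tag.id = jobtag.tag_id
	// and the Jobs resolver concatenates them into a statement of this shape:
	qstr := `SELECT job.* FROM job
	JOIN jobtag ON jobtag.job_id = job.id
	JOIN tag ON tag.id = jobtag.tag_id
	WHERE tag.tag_name = 'slow-io'
	LIMIT 10 OFFSET 0`
	fmt.Println(qstr)
}
```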
@@ -201,14 +218,18 @@
func (r *queryResolver) JobsStatistics(
ctx context.Context,
filterList *model.JobFilterList) (*model.JobsStatistics, error) {
var qc string
var qc, jo string
if filterList != nil {
qc = buildQueryConditions(filterList)
qc, jo = buildQueryConditions(filterList)
if qc != "" {
qc = `WHERE ` + qc
}
if jo != "" {
qc = jo + qc
}
}
// TODO Change current node hours to core hours
@@ -305,7 +326,7 @@ func (r *queryResolver) JobMetrics(
return list, nil
}
func (r *queryResolver) JobTags(
func (r *queryResolver) Tags(
ctx context.Context, jobId *string) ([]*model.JobTag, error) {
if jobId == nil {
@@ -316,20 +337,22 @@ func (r *queryResolver) JobTags(
tags := []*model.JobTag{}
for rows.Next() {
var tag *model.JobTag
var tag model.JobTag
err = rows.StructScan(&tag)
if err != nil {
return nil, err
}
tags = append(tags, tag)
tags = append(tags, &tag)
}
return tags, nil
}
/* TODO: Use cluster id? */
query := `
SELECT id, tag_name, tag_type FROM tag
SELECT tag.id, tag.tag_name, tag.tag_type FROM tag
JOIN jobtag ON tag.id = jobtag.tag_id
WHERE jobtag.job_id = $1
JOIN job ON job.id = jobtag.job_id
WHERE job.job_id = $1
`
rows, err := r.DB.Queryx(query, jobId)
if err != nil {
@@ -338,23 +361,43 @@
tags := []*model.JobTag{}
for rows.Next() {
var tag *model.JobTag
var tag model.JobTag
err = rows.StructScan(&tag)
if err != nil {
return nil, err
}
tags = append(tags, tag)
tags = append(tags, &tag)
}
return tags, nil
}
func (r *queryResolver) JobsByTag(
ctx context.Context, jobId string) ([]string, error) {
func (r *jobResolver) Tags(ctx context.Context, job *model.Job) ([]*model.JobTag, error) {
query := `
SELECT tag.id, tag.tag_name, tag.tag_type FROM tag
JOIN jobtag ON tag.id = jobtag.tag_id
WHERE jobtag.job_id = $1
`
rows, err := r.DB.Queryx(query, job.ID)
if err != nil {
return nil, err
}
return nil, errors.New("unimplemented")
tags := []*model.JobTag{}
for rows.Next() {
var tag model.JobTag
err = rows.StructScan(&tag)
if err != nil {
return nil, err
}
tags = append(tags, &tag)
}
return tags, nil
}
func (r *Resolver) Job() generated.JobResolver { return &jobResolver{r} }
func (r *Resolver) Query() generated.QueryResolver { return &queryResolver{r} }
type jobResolver struct{ *Resolver }
type queryResolver struct{ *Resolver }
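
As a usage sketch (not part of the commit): the renamed `tags` root query can be exercised over HTTP. The endpoint path `/query`, the port, the job id "1234", and the JobTag selection set (`id`, `tagName`, `tagType`) are assumptions inferred from the model fields and SQL columns above, not confirmed by this diff.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// GraphQL document using the renamed root query; the optional jobId
	// argument restricts the result to tags attached to that job.
	payload, _ := json.Marshal(map[string]string{
		"query": `{ tags(jobId: "1234") { id tagName tagType } }`,
	})

	// Assumption: the GraphQL handler is mounted at /query on localhost:8080.
	resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var result map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}
	fmt.Println(result)
}
```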

View File

@@ -15,6 +15,8 @@ type Job {
memBw_avg: Float
netBw_avg: Float
fileBw_avg: Float
tags: [JobTag!]
}
type JobMetric {
@@ -55,10 +57,7 @@ type Query {
jobMetrics(jobId: String!, metrics: [String]): [JobMetricWithName]!
# Return all known tags or, if jobId is specified, only tags from this job
jobTags(jobId: String): [JobTag!]!
tags(jobId: String): [JobTag!]!
# For a tag ID, return the ID's of all jobs with that tag
jobsByTag(tag: ID!): [ID!]!
}
input StartJobInput {
@@ -90,6 +89,8 @@ input JobFilterList {
}
input JobFilter {
tagName: String
tagType: String
jobId: StringInput
userId: StringInput
projectId: StringInput