ClusterCockpit/cc-backend (mirror of https://github.com/ClusterCockpit/cc-backend)
Commit 836c1aea3a (parent 365b1a2066)

change tags filter api, add load_avg, use camelCase in API
@@ -62,21 +62,21 @@ type ComplexityRoot struct {
 	}

 	Job struct {
 		ClusterID    func(childComplexity int) int
 		Duration     func(childComplexity int) int
-		FileBw_avg   func(childComplexity int) int
-		FlopsAny_avg func(childComplexity int) int
+		FileBwAvg    func(childComplexity int) int
+		FlopsAnyAvg  func(childComplexity int) int
 		HasProfile   func(childComplexity int) int
 		ID           func(childComplexity int) int
 		JobID        func(childComplexity int) int
-		MemBw_avg    func(childComplexity int) int
-		MemUsed_max  func(childComplexity int) int
-		NetBw_avg    func(childComplexity int) int
+		MemBwAvg     func(childComplexity int) int
+		MemUsedMax   func(childComplexity int) int
+		NetBwAvg     func(childComplexity int) int
 		NumNodes     func(childComplexity int) int
 		ProjectID    func(childComplexity int) int
 		StartTime    func(childComplexity int) int
 		Tags         func(childComplexity int) int
 		UserID       func(childComplexity int) int
 	}

 	JobMetric struct {
@@ -138,7 +138,7 @@ type ComplexityRoot struct {
 	Query struct {
 		Clusters       func(childComplexity int) int
 		JobByID        func(childComplexity int, jobID string) int
-		JobMetrics     func(childComplexity int, jobID string, metrics []*string) int
+		JobMetrics     func(childComplexity int, jobID string, clusterID *string, startTime *time.Time, metrics []*string) int
 		Jobs           func(childComplexity int, filter *model.JobFilterList, page *model.PageRequest, order *model.OrderByInput) int
 		JobsStatistics func(childComplexity int, filter *model.JobFilterList) int
 		Tags           func(childComplexity int, jobID *string) int

@@ -153,7 +153,7 @@ type QueryResolver interface {
 	JobByID(ctx context.Context, jobID string) (*model.Job, error)
 	Jobs(ctx context.Context, filter *model.JobFilterList, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error)
 	JobsStatistics(ctx context.Context, filter *model.JobFilterList) (*model.JobsStatistics, error)
-	JobMetrics(ctx context.Context, jobID string, metrics []*string) ([]*model.JobMetricWithName, error)
+	JobMetrics(ctx context.Context, jobID string, clusterID *string, startTime *time.Time, metrics []*string) ([]*model.JobMetricWithName, error)
 	Tags(ctx context.Context, jobID *string) ([]*model.JobTag, error)
 }

@@ -264,18 +264,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 		return e.complexity.Job.Duration(childComplexity), true

 	case "Job.fileBw_avg":
-		if e.complexity.Job.FileBw_avg == nil {
+		if e.complexity.Job.FileBwAvg == nil {
 			break
 		}

-		return e.complexity.Job.FileBw_avg(childComplexity), true
+		return e.complexity.Job.FileBwAvg(childComplexity), true

 	case "Job.flopsAny_avg":
-		if e.complexity.Job.FlopsAny_avg == nil {
+		if e.complexity.Job.FlopsAnyAvg == nil {
 			break
 		}

-		return e.complexity.Job.FlopsAny_avg(childComplexity), true
+		return e.complexity.Job.FlopsAnyAvg(childComplexity), true

 	case "Job.hasProfile":
 		if e.complexity.Job.HasProfile == nil {

@@ -299,25 +299,25 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 		return e.complexity.Job.JobID(childComplexity), true

 	case "Job.memBw_avg":
-		if e.complexity.Job.MemBw_avg == nil {
+		if e.complexity.Job.MemBwAvg == nil {
 			break
 		}

-		return e.complexity.Job.MemBw_avg(childComplexity), true
+		return e.complexity.Job.MemBwAvg(childComplexity), true

 	case "Job.memUsed_max":
-		if e.complexity.Job.MemUsed_max == nil {
+		if e.complexity.Job.MemUsedMax == nil {
 			break
 		}

-		return e.complexity.Job.MemUsed_max(childComplexity), true
+		return e.complexity.Job.MemUsedMax(childComplexity), true

 	case "Job.netBw_avg":
-		if e.complexity.Job.NetBw_avg == nil {
+		if e.complexity.Job.NetBwAvg == nil {
 			break
 		}

-		return e.complexity.Job.NetBw_avg(childComplexity), true
+		return e.complexity.Job.NetBwAvg(childComplexity), true

 	case "Job.numNodes":
 		if e.complexity.Job.NumNodes == nil {
@@ -607,7 +607,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 			return 0, false
 		}

-		return e.complexity.Query.JobMetrics(childComplexity, args["jobId"].(string), args["metrics"].([]*string)), true
+		return e.complexity.Query.JobMetrics(childComplexity, args["jobId"].(string), args["clusterId"].(*string), args["startTime"].(*time.Time), args["metrics"].([]*string)), true

 	case "Query.jobs":
 		if e.complexity.Query.Jobs == nil {
@@ -775,7 +775,7 @@ type Query {
   jobById(jobId: String!): Job
   jobs(filter: JobFilterList, page: PageRequest, order: OrderByInput): JobResultList!
   jobsStatistics(filter: JobFilterList): JobsStatistics!
-  jobMetrics(jobId: String!, metrics: [String]): [JobMetricWithName]!
+  jobMetrics(jobId: String!, clusterId: String, startTime: Time, metrics: [String]): [JobMetricWithName]!

   # Return all known tags or, if jobId is specified, only tags from this job
   tags(jobId: String): [JobTag!]!

@@ -810,8 +810,7 @@ input JobFilterList {
 }

 input JobFilter {
-  tagName: String
-  tagType: String
+  tags: [ID!]
   jobId: StringInput
   userId: StringInput
   projectId: StringInput
@@ -936,15 +935,33 @@ func (ec *executionContext) field_Query_jobMetrics_args(ctx context.Context, raw
 		}
 	}
 	args["jobId"] = arg0
-	var arg1 []*string
-	if tmp, ok := rawArgs["metrics"]; ok {
-		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("metrics"))
-		arg1, err = ec.unmarshalOString2ᚕᚖstring(ctx, tmp)
+	var arg1 *string
+	if tmp, ok := rawArgs["clusterId"]; ok {
+		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId"))
+		arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
 		if err != nil {
 			return nil, err
 		}
 	}
-	args["metrics"] = arg1
+	args["clusterId"] = arg1
+	var arg2 *time.Time
+	if tmp, ok := rawArgs["startTime"]; ok {
+		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("startTime"))
+		arg2, err = ec.unmarshalOTime2ᚖtimeᚐTime(ctx, tmp)
+		if err != nil {
+			return nil, err
+		}
+	}
+	args["startTime"] = arg2
+	var arg3 []*string
+	if tmp, ok := rawArgs["metrics"]; ok {
+		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("metrics"))
+		arg3, err = ec.unmarshalOString2ᚕᚖstring(ctx, tmp)
+		if err != nil {
+			return nil, err
+		}
+	}
+	args["metrics"] = arg3
 	return args, nil
 }

@@ -1767,7 +1784,7 @@ func (ec *executionContext) _Job_memUsed_max(ctx context.Context, field graphql.
 	ctx = graphql.WithFieldContext(ctx, fc)
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return obj.MemUsed_max, nil
+		return obj.MemUsedMax, nil
 	})
 	if err != nil {
 		ec.Error(ctx, err)

@@ -1799,7 +1816,7 @@ func (ec *executionContext) _Job_flopsAny_avg(ctx context.Context, field graphql
 	ctx = graphql.WithFieldContext(ctx, fc)
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return obj.FlopsAny_avg, nil
+		return obj.FlopsAnyAvg, nil
 	})
 	if err != nil {
 		ec.Error(ctx, err)

@@ -1831,7 +1848,7 @@ func (ec *executionContext) _Job_memBw_avg(ctx context.Context, field graphql.Co
 	ctx = graphql.WithFieldContext(ctx, fc)
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return obj.MemBw_avg, nil
+		return obj.MemBwAvg, nil
 	})
 	if err != nil {
 		ec.Error(ctx, err)

@@ -1863,7 +1880,7 @@ func (ec *executionContext) _Job_netBw_avg(ctx context.Context, field graphql.Co
 	ctx = graphql.WithFieldContext(ctx, fc)
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return obj.NetBw_avg, nil
+		return obj.NetBwAvg, nil
 	})
 	if err != nil {
 		ec.Error(ctx, err)

@@ -1895,7 +1912,7 @@ func (ec *executionContext) _Job_fileBw_avg(ctx context.Context, field graphql.C
 	ctx = graphql.WithFieldContext(ctx, fc)
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return obj.FileBw_avg, nil
+		return obj.FileBwAvg, nil
 	})
 	if err != nil {
 		ec.Error(ctx, err)
@@ -3232,7 +3249,7 @@ func (ec *executionContext) _Query_jobMetrics(ctx context.Context, field graphql
 	fc.Args = args
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return ec.resolvers.Query().JobMetrics(rctx, args["jobId"].(string), args["metrics"].([]*string))
+		return ec.resolvers.Query().JobMetrics(rctx, args["jobId"].(string), args["clusterId"].(*string), args["startTime"].(*time.Time), args["metrics"].([]*string))
 	})
 	if err != nil {
 		ec.Error(ctx, err)
@@ -4579,19 +4596,11 @@ func (ec *executionContext) unmarshalInputJobFilter(ctx context.Context, obj int

 	for k, v := range asMap {
 		switch k {
-		case "tagName":
+		case "tags":
 			var err error

-			ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagName"))
-			it.TagName, err = ec.unmarshalOString2ᚖstring(ctx, v)
-			if err != nil {
-				return it, err
-			}
-		case "tagType":
-			var err error
-
-			ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagType"))
-			it.TagType, err = ec.unmarshalOString2ᚖstring(ctx, v)
+			ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tags"))
+			it.Tags, err = ec.unmarshalOID2ᚕstringᚄ(ctx, v)
 			if err != nil {
 				return it, err
 			}
@@ -6498,6 +6507,42 @@ func (ec *executionContext) marshalOHistoPoint2ᚖgithubᚗcomᚋClusterCockpit
 	return ec._HistoPoint(ctx, sel, v)
 }

+func (ec *executionContext) unmarshalOID2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) {
+	if v == nil {
+		return nil, nil
+	}
+	var vSlice []interface{}
+	if v != nil {
+		if tmp1, ok := v.([]interface{}); ok {
+			vSlice = tmp1
+		} else {
+			vSlice = []interface{}{v}
+		}
+	}
+	var err error
+	res := make([]string, len(vSlice))
+	for i := range vSlice {
+		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i))
+		res[i], err = ec.unmarshalNID2string(ctx, vSlice[i])
+		if err != nil {
+			return nil, err
+		}
+	}
+	return res, nil
+}
+
+func (ec *executionContext) marshalOID2ᚕstringᚄ(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler {
+	if v == nil {
+		return graphql.Null
+	}
+	ret := make(graphql.Array, len(v))
+	for i := range v {
+		ret[i] = ec.marshalNID2string(ctx, sel, v[i])
+	}
+
+	return ret
+}
+
 func (ec *executionContext) unmarshalOInt2ᚖint(ctx context.Context, v interface{}) (*int, error) {
 	if v == nil {
 		return nil, nil
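The generated unmarshalOID2ᚕstringᚄ helper accepts either a single ID or a list of IDs and always yields a []string. A standalone sketch of that coercion step; the gqlgen-specific per-element call (ec.unmarshalNID2string) is stubbed with a plain type assertion here:

package main

import "fmt"

// coerceIDs mimics the slice-coercion logic of the generated helper:
// a single value becomes a one-element slice, a list stays a list.
func coerceIDs(v interface{}) []string {
	if v == nil {
		return nil
	}
	var vSlice []interface{}
	if tmp, ok := v.([]interface{}); ok {
		vSlice = tmp
	} else {
		vSlice = []interface{}{v}
	}
	res := make([]string, len(vSlice))
	for i := range vSlice {
		// Stand-in for ec.unmarshalNID2string: IDs arrive as JSON strings here.
		res[i], _ = vSlice[i].(string)
	}
	return res
}

func main() {
	fmt.Println(coerceIDs("42"))                          // [42]
	fmt.Println(coerceIDs([]interface{}{"3", "7", "19"})) // [3 7 19]
}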
@@ -6729,6 +6774,21 @@ func (ec *executionContext) unmarshalOStringInput2ᚖgithubᚗcomᚋClusterCockp
 	return &res, graphql.ErrorOnPath(ctx, err)
 }

+func (ec *executionContext) unmarshalOTime2ᚖtimeᚐTime(ctx context.Context, v interface{}) (*time.Time, error) {
+	if v == nil {
+		return nil, nil
+	}
+	res, err := graphql.UnmarshalTime(v)
+	return &res, graphql.ErrorOnPath(ctx, err)
+}
+
+func (ec *executionContext) marshalOTime2ᚖtimeᚐTime(ctx context.Context, sel ast.SelectionSet, v *time.Time) graphql.Marshaler {
+	if v == nil {
+		return graphql.Null
+	}
+	return graphql.MarshalTime(*v)
+}
+
 func (ec *executionContext) unmarshalOTimeRange2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑjobarchiveᚋgraphᚋmodelᚐTimeRange(ctx context.Context, v interface{}) (*model.TimeRange, error) {
 	if v == nil {
 		return nil, nil
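The new optional startTime argument uses the Time scalar; assuming gqlgen's default Time mapping (RFC 3339 strings), a minimal sketch of how such a value travels over the wire:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assuming the default gqlgen Time scalar (RFC 3339 strings),
	// a job start time would be sent like this:
	start := time.Date(2021, 10, 1, 12, 30, 0, 0, time.UTC)
	wire := start.Format(time.RFC3339) // "2021-10-01T12:30:00Z"
	fmt.Println(wire)

	// ...and parsed back on the server side:
	parsed, err := time.Parse(time.RFC3339, wire)
	if err != nil {
		panic(err)
	}
	fmt.Println(parsed.Equal(start)) // true
}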
@@ -5,24 +5,25 @@ import (
 )

 type Job struct {
 	ID           string    `json:"id"`
 	JobID        string    `json:"jobId" db:"job_id"`
 	UserID       string    `json:"userId" db:"user_id"`
 	ProjectID    string    `json:"projectId" db:"project_id"`
 	ClusterID    string    `json:"clusterId" db:"cluster_id"`
 	StartTime    time.Time `json:"startTime" db:"start_time"`
 	Duration     int       `json:"duration" db:"duration"`
 	Walltime     *int      `json:"walltime" db:"walltime"`
 	Jobstate     *string   `json:"jobstate" db:"job_state"`
 	NumNodes     int       `json:"numNodes" db:"num_nodes"`
 	NodeList     string    `json:"nodelist" db:"node_list"`
 	HasProfile   bool      `json:"hasProfile" db:"has_profile"`
-	MemUsed_max  *float64  `json:"memUsedMax" db:"mem_used_max"`
-	FlopsAny_avg *float64  `json:"flopsAnyAvg" db:"flops_any_avg"`
-	MemBw_avg    *float64  `json:"memBwAvg" db:"mem_bw_avg"`
-	NetBw_avg    *float64  `json:"netBwAvg" db:"net_bw_avg"`
-	FileBw_avg   *float64  `json:"fileBwAvg" db:"file_bw_avg"`
-	Tags         []JobTag  `json:"tags"`
+	MemUsedMax   *float64  `json:"memUsedMax" db:"mem_used_max"`
+	FlopsAnyAvg  *float64  `json:"flopsAnyAvg" db:"flops_any_avg"`
+	MemBwAvg     *float64  `json:"memBwAvg" db:"mem_bw_avg"`
+	NetBwAvg     *float64  `json:"netBwAvg" db:"net_bw_avg"`
+	FileBwAvg    *float64  `json:"fileBwAvg" db:"file_bw_avg"`
+	LoadAvg      *float64  `json:"loadAvg" db:"load_avg"`
+	Tags         []JobTag  `json:"tags"`
 }

 type JobTag struct {
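The Go field names move to camelCase while the json and db tags keep their existing values, so the JSON representation only gains the new loadAvg field. A small sketch with a standalone struct mirroring a few of the fields (not the package's own type):

package main

import (
	"encoding/json"
	"fmt"
)

// jobSample mirrors a few Job fields to show the tag behaviour;
// the real type lives in graph/model.
type jobSample struct {
	MemUsedMax  *float64 `json:"memUsedMax" db:"mem_used_max"`
	FlopsAnyAvg *float64 `json:"flopsAnyAvg" db:"flops_any_avg"`
	LoadAvg     *float64 `json:"loadAvg" db:"load_avg"`
}

func main() {
	mem, flops, load := 42.0, 1.5, 0.9
	out, _ := json.Marshal(jobSample{&mem, &flops, &load})
	// The field renames do not change the wire format, because the json tags stay camelCase:
	fmt.Println(string(out)) // {"memUsedMax":42,"flopsAnyAvg":1.5,"loadAvg":0.9}
}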
@@ -35,8 +35,7 @@ type IntRange struct {
 }

 type JobFilter struct {
-	TagName   *string      `json:"tagName"`
-	TagType   *string      `json:"tagType"`
+	Tags      []string     `json:"tags"`
 	JobID     *StringInput `json:"jobId"`
 	UserID    *StringInput `json:"userId"`
 	ProjectID *StringInput `json:"projectId"`

@@ -9,6 +9,8 @@ import (
 	"os"
 	"strconv"
 	"encoding/json"
+	"time"
+	"regexp"

 	"github.com/ClusterCockpit/cc-jobarchive/graph/generated"
 	"github.com/ClusterCockpit/cc-jobarchive/graph/model"

@@ -61,19 +63,10 @@ func buildQueryConditions(filterList *model.JobFilterList) (string, string) {
 	var conditions []string
 	var join string

-	joinJobtags := `
-	JOIN jobtag ON jobtag.job_id = job.id
-	JOIN tag ON tag.id = jobtag.tag_id
-	`
-
 	for _, condition := range filterList.List {
-		if condition.TagName != nil {
-			conditions = append(conditions, fmt.Sprintf("tag.tag_name = '%s'", *condition.TagName))
-			join = joinJobtags
-		}
-		if condition.TagType != nil {
-			conditions = append(conditions, fmt.Sprintf("tag.tag_type = '%s'", *condition.TagType))
-			join = joinJobtags
+		if condition.Tags != nil && len(condition.Tags) > 0 {
+			conditions = append(conditions, "jobtag.tag_id IN ('" + strings.Join(condition.Tags, "', '") + "')")
+			join = ` JOIN jobtag ON jobtag.job_id = job.id `
 		}
 		if condition.JobID != nil {
 			conditions = addStringCondition(conditions, `job.job_id`, condition.JobID)
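With the new filter, tag selection is done by tag IDs joined into a SQL IN clause instead of matching tag name/type, and only the jobtag table still needs to be joined. A minimal standalone sketch of the condition string this produces (the tag IDs are made up for illustration):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical tag IDs as they would arrive in a JobFilter{Tags: ...} input.
	tags := []string{"3", "7", "19"}

	// Same concatenation as in buildQueryConditions above.
	condition := "jobtag.tag_id IN ('" + strings.Join(tags, "', '") + "')"
	join := ` JOIN jobtag ON jobtag.job_id = job.id `

	fmt.Println(condition) // jobtag.tag_id IN ('3', '7', '19')
	fmt.Println(join)
}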
@@ -101,8 +94,7 @@ func buildQueryConditions(filterList *model.JobFilterList) (string, string) {
 	return strings.Join(conditions, " AND "), join
 }

-func readJobDataFile(jobId string) ([]byte, error) {
-	// TODO: Use suffix as cluster-id!
+func readJobDataFile(jobId string, clusterId *string, startTime *time.Time) ([]byte, error) {
 	jobId = strings.Split(jobId, ".")[0]
 	id, err := strconv.Atoi(jobId)
 	if err != nil {

@@ -110,7 +102,13 @@ func readJobDataFile(jobId string) ([]byte, error) {
 	}

 	lvl1, lvl2 := id / 1000, id % 1000
-	filepath := fmt.Sprintf("./job-data/%d/%03d/data.json", lvl1, lvl2)
+	var filepath string
+	if clusterId == nil {
+		filepath = fmt.Sprintf("./job-data/%d/%03d/data.json", lvl1, lvl2)
+	} else {
+		filepath = fmt.Sprintf("./job-data/%s/%d/%03d/data.json", *clusterId, lvl1, lvl2)
+	}
+
 	f, err := os.ReadFile(filepath)
 	if err != nil {
 		return nil, err
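The job-archive path now gets an optional per-cluster prefix. A small sketch of the two path forms produced by the format strings above; the job ID and cluster name are made up for illustration:

package main

import "fmt"

func main() {
	// Same two-level split as readJobDataFile: id/1000 and id%1000.
	id := 1234567
	lvl1, lvl2 := id/1000, id%1000

	// Without a clusterId the old flat layout is used:
	fmt.Printf("./job-data/%d/%03d/data.json\n", lvl1, lvl2)
	// ./job-data/1234/567/data.json

	// With a clusterId (here the made-up name "emmy") it becomes a per-cluster tree:
	cluster := "emmy"
	fmt.Printf("./job-data/%s/%d/%03d/data.json\n", cluster, lvl1, lvl2)
	// ./job-data/emmy/1234/567/data.json
}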
@@ -128,6 +126,14 @@ func contains(s []*string, e string) bool {
 	return false
 }

+func toSnakeCase(str string) string {
+	matchFirstCap := regexp.MustCompile("(.)([A-Z][a-z]+)")
+	matchAllCap := regexp.MustCompile("([a-z0-9])([A-Z])")
+	snake := matchFirstCap.ReplaceAllString(str, "${1}_${2}")
+	snake = matchAllCap.ReplaceAllString(snake, "${1}_${2}")
+	return strings.ToLower(snake)
+}
+
 // Queries

 func (r *queryResolver) JobByID(
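toSnakeCase maps the camelCase field names coming in through the API back to the snake_case column names used in the database, which is what the ORDER BY change below relies on. A standalone sketch of the same regex pipeline:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same regex pipeline as the toSnakeCase helper added above.
func toSnakeCase(str string) string {
	matchFirstCap := regexp.MustCompile("(.)([A-Z][a-z]+)")
	matchAllCap := regexp.MustCompile("([a-z0-9])([A-Z])")
	snake := matchFirstCap.ReplaceAllString(str, "${1}_${2}")
	snake = matchAllCap.ReplaceAllString(snake, "${1}_${2}")
	return strings.ToLower(snake)
}

func main() {
	// camelCase API field -> snake_case database column
	for _, f := range []string{"flopsAnyAvg", "memUsedMax", "startTime", "numNodes"} {
		fmt.Printf("%s -> %s\n", f, toSnakeCase(f))
	}
	// flopsAnyAvg -> flops_any_avg
	// memUsedMax -> mem_used_max
	// startTime -> start_time
	// numNodes -> num_nodes
}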
@@ -177,7 +183,8 @@ func (r *queryResolver) Jobs(
 	}

 	if orderBy != nil {
-		ob = fmt.Sprintf("ORDER BY %s %s", orderBy.Field, *orderBy.Order)
+		ob = fmt.Sprintf("ORDER BY %s %s",
+			toSnakeCase(orderBy.Field), *orderBy.Order)
 	}

 	qstr := `SELECT job.* `

@@ -322,18 +329,14 @@ func (r *queryResolver) Clusters(ctx context.Context) ([]*model.Cluster, error)

 func (r *queryResolver) JobMetrics(
 	ctx context.Context, jobId string,
+	clusterId *string, startTime *time.Time,
 	metrics []*string) ([]*model.JobMetricWithName, error) {

-	f, err := readJobDataFile(jobId)
+	f, err := readJobDataFile(jobId, clusterId, startTime)
 	if err != nil {
 		return nil, err
 	}

-	/*
-	 * GraphQL has no Map-Type, so
-	 * this is the best i could come up with.
-	 * This is only for testing anyways?
-	 */
 	var list []*model.JobMetricWithName
 	var metricMap map[string]*model.JobMetric

@@ -78,7 +78,7 @@ type Query {
   jobById(jobId: String!): Job
   jobs(filter: JobFilterList, page: PageRequest, order: OrderByInput): JobResultList!
   jobsStatistics(filter: JobFilterList): JobsStatistics!
-  jobMetrics(jobId: String!, metrics: [String]): [JobMetricWithName]!
+  jobMetrics(jobId: String!, clusterId: String, startTime: Time, metrics: [String]): [JobMetricWithName]!

   # Return all known tags or, if jobId is specified, only tags from this job
   tags(jobId: String): [JobTag!]!

@@ -113,8 +113,7 @@ input JobFilterList {
 }

 input JobFilter {
-  tagName: String
-  tagType: String
+  tags: [ID!]
   jobId: StringInput
   userId: StringInput
   projectId: StringInput
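Taken together, the schema changes mean a client now filters jobs by tag IDs and can pass clusterId and startTime to jobMetrics. A rough Go client sketch; the endpoint URL, job/tag/cluster IDs, the metric name, and the filter's list field (inferred from model.JobFilterList.List) are illustrative assumptions, not part of the commit:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Query exercising the changed API: the tags ID filter and the new
	// clusterId/startTime arguments of jobMetrics. Selection sets use
	// __typename to stay independent of fields not shown in this diff.
	query := `{
	  jobs(filter: {list: [{tags: ["3", "7"]}]}) { __typename }
	  jobMetrics(jobId: "1234567", clusterId: "emmy",
	             startTime: "2021-10-01T12:30:00Z", metrics: ["flops_any"]) {
	    __typename
	  }
	}`

	body, _ := json.Marshal(map[string]string{"query": query})

	// http://localhost:8080/query is a placeholder endpoint.
	resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}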