mirror of https://github.com/ClusterCockpit/cc-backend
synced 2024-12-26 13:29:05 +01:00

Update GraphQL schema

parent 341091a796
commit e4c0b3e955

graph/analysis.go (new file, 16 lines added)
@@ -0,0 +1,16 @@
+package graph
+
+import (
+	"context"
+	"errors"
+
+	"github.com/ClusterCockpit/cc-jobarchive/graph/model"
+)
+
+func (r *queryResolver) JobMetricAverages(ctx context.Context, filter model.JobFilterList, metrics []*string) ([][]*float64, error) {
+	return nil, errors.New("unimplemented")
+}
+
+func (r *queryResolver) RooflineHeatmap(ctx context.Context, filter model.JobFilterList, rows, cols int, minX, minY, maxX, maxY float64) ([][]float64, error) {
+	return nil, errors.New("unimplemented")
+}
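Both new resolvers are stubs that just return "unimplemented". As a rough illustration of the result shape JobMetricAverages is meant to fill ([][]*float64, with nil entries where no data exists for a job/metric pair), here is a minimal, self-contained sketch; the averageSeries helper and the example values are assumptions for illustration, not part of this commit.

package main

import "fmt"

// averageSeries is a hypothetical helper: it returns the mean of a metric
// series as a *float64, or nil when the series is empty. The nil case mirrors
// the nullable entries in JobMetricAverages' [][]*float64 result.
func averageSeries(values []float64) *float64 {
	if len(values) == 0 {
		return nil
	}
	sum := 0.0
	for _, v := range values {
		sum += v
	}
	avg := sum / float64(len(values))
	return &avg
}

func main() {
	flopsAny := []float64{1.2, 0.8, 1.0}
	if avg := averageSeries(flopsAny); avg != nil {
		fmt.Printf("flops_any average: %.2f\n", *avg) // prints 1.00
	}
	fmt.Println(averageSeries(nil)) // prints <nil>: no data for this job/metric
}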

graph/config.go (new file, 10 lines added)

@@ -0,0 +1,10 @@
+package graph
+
+import (
+	"context"
+	"errors"
+)
+
+func (r *mutationResolver) UpdateConfiguration(ctx context.Context, key, value string) (*string, error) {
+	return nil, errors.New("unimplemented")
+}
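UpdateConfiguration is likewise stubbed out. Below is a minimal sketch of the kind of concurrency-safe key/value store such a mutation could eventually delegate to; the configStore type, its methods, and the example key are assumptions, not code from this commit.

package main

import (
	"fmt"
	"sync"
)

// configStore is a hypothetical, concurrency-safe key/value store that an
// UpdateConfiguration-style mutation could write to.
type configStore struct {
	mu     sync.Mutex
	values map[string]string
}

func newConfigStore() *configStore {
	return &configStore{values: map[string]string{}}
}

// Set stores value under key and returns the previous value, or nil if the
// key was unset -- matching the (*string, error) shape of the resolver.
func (s *configStore) Set(key, value string) *string {
	s.mu.Lock()
	defer s.mu.Unlock()
	var prev *string
	if old, ok := s.values[key]; ok {
		prev = &old
	}
	s.values[key] = value
	return prev
}

func main() {
	store := newConfigStore()
	store.Set("plot.colorScheme", "default")
	if prev := store.Set("plot.colorScheme", "dark"); prev != nil {
		fmt.Println("previous value:", *prev) // previous value: default
	}
}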
File diff suppressed because it is too large
@@ -44,7 +44,7 @@ type JobFilter struct {
 	Duration *IntRange `json:"duration"`
 	NumNodes *IntRange `json:"numNodes"`
 	StartTime *TimeRange `json:"startTime"`
-	HasProfile *bool `json:"hasProfile"`
+	IsRunning *bool `json:"isRunning"`
 	FlopsAnyAvg *FloatRange `json:"flopsAnyAvg"`
 	MemBwAvg *FloatRange `json:"memBwAvg"`
 	LoadAvg *FloatRange `json:"loadAvg"`
@@ -111,8 +111,8 @@ type OrderByInput struct {
 }
 
 type PageRequest struct {
-	ItemsPerPage *int `json:"itemsPerPage"`
-	Page         *int `json:"page"`
+	ItemsPerPage int `json:"itemsPerPage"`
+	Page         int `json:"page"`
 }
 
 type StringInput struct {
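The PageRequest fields change from *int to int because the corresponding schema fields become non-nullable (Int to Int!, see the schema hunk further down): the code generator (gqlgen) maps nullable scalars to pointer fields and non-nullable scalars to plain values. A small hand-written sketch of the two shapes and what that means for resolver code; both structs here are illustrative stand-ins, not the generated file itself.

package main

import "fmt"

// Hand-written equivalents of the generated structs, for illustration only:
// a nullable Int becomes a *int field, a non-nullable Int! a plain int.
type pageRequestNullable struct {
	ItemsPerPage *int `json:"itemsPerPage"` // itemsPerPage: Int
	Page         *int `json:"page"`         // page: Int
}

type pageRequestNonNull struct {
	ItemsPerPage int `json:"itemsPerPage"` // itemsPerPage: Int!
	Page         int `json:"page"`         // page: Int!
}

func main() {
	// With pointer fields the resolver has to nil-check and dereference ...
	n, p := 50, 1
	before := pageRequestNullable{ItemsPerPage: &n, Page: &p}
	fmt.Println(*before.ItemsPerPage, *before.Page)

	// ... with value fields it can use the numbers directly.
	after := pageRequestNonNull{ItemsPerPage: 50, Page: 1}
	fmt.Println(after.ItemsPerPage, after.Page)
}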
@@ -132,6 +132,13 @@ type TimeRangeOutput struct {
 	To time.Time `json:"to"`
 }
 
+type UserStats struct {
+	UserID string `json:"userId"`
+	TotalJobs int `json:"totalJobs"`
+	TotalWalltime float64 `json:"totalWalltime"`
+	TotalCoreHours float64 `json:"totalCoreHours"`
+}
+
 type JobMetricScope string
 
 const (
@@ -114,7 +114,7 @@ func buildQueryConditions(filterList *model.JobFilterList) (string, string) {
 	return strings.Join(conditions, " AND "), join
 }
 
-func readJobDataFile(jobId string, clusterId *string, startTime *time.Time) ([]byte, error) {
+func readJobDataFile(jobId string, clusterId *string) ([]byte, error) {
 	jobId = strings.Split(jobId, ".")[0]
 	id, err := strconv.Atoi(jobId)
 	if err != nil {
@@ -205,13 +205,8 @@ func (r *queryResolver) Jobs(
 	var limit, offset int
 	var qc, ob, jo string
 
-	if page != nil {
-		limit = *page.ItemsPerPage
-		offset = (*page.Page - 1) * limit
-	} else {
-		limit = 20
-		offset = 0
-	}
+	limit = page.ItemsPerPage
+	offset = (page.Page - 1) * limit
 
 	if filterList != nil {
 		qc, jo = buildQueryConditions(filterList)
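With itemsPerPage and page now plain ints, the Jobs resolver computes its LIMIT and OFFSET directly from the page argument and drops the dereferences and the hard-coded defaults. A standalone sketch of that arithmetic; the SQL text and table name are assumptions, only the limit/offset calculation mirrors the resolver.

package main

import "fmt"

// pageRequest mirrors the generated model after this change: plain ints.
type pageRequest struct {
	ItemsPerPage int
	Page         int
}

// limitOffset turns a 1-based page request into LIMIT/OFFSET values using
// the same arithmetic as the resolver above.
func limitOffset(page pageRequest) (limit, offset int) {
	limit = page.ItemsPerPage
	offset = (page.Page - 1) * limit
	return limit, offset
}

func main() {
	limit, offset := limitOffset(pageRequest{ItemsPerPage: 25, Page: 3})
	// Hypothetical SQL text; the real resolver assembles its WHERE clause via
	// buildQueryConditions, which this diff does not show in full.
	fmt.Printf("SELECT ... FROM job LIMIT %d OFFSET %d\n", limit, offset)
	// -> LIMIT 25 OFFSET 50
}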
@@ -371,11 +366,10 @@ func (r *queryResolver) Clusters(ctx context.Context) ([]*model.Cluster, error)
 }
 
 func (r *queryResolver) JobMetrics(
-	ctx context.Context, jobId string,
-	clusterId *string, startTime *time.Time,
+	ctx context.Context, jobId string, clusterId *string,
 	metrics []*string) ([]*model.JobMetricWithName, error) {
 
-	f, err := readJobDataFile(jobId, clusterId, startTime)
+	f, err := readJobDataFile(jobId, clusterId)
 	if err != nil {
 		return nil, err
 	}
@@ -593,4 +587,4 @@ func (r *Resolver) Mutation() generated.MutationResolver { return &mutationResol
 type jobResolver struct{ *Resolver }
 type clusterResolver struct{ *Resolver }
 type queryResolver struct{ *Resolver }
-type mutationResolver struct { *Resolver }
+type mutationResolver struct{ *Resolver }
@@ -1,6 +1,5 @@
 type Job {
   id: ID!
-
   jobId: String!
   userId: String!
   projectId: String!
@@ -9,6 +8,7 @@ type Job {
   duration: Int!
   numNodes: Int!
   hasProfile: Boolean!
+  tags: [JobTag!]!
 
   loadAvg: Float
   memUsedMax: Float
@@ -16,8 +16,12 @@ type Job {
   memBwAvg: Float
   netBwAvg: Float
   fileBwAvg: Float
-
-  tags: [JobTag!]
+}
+
+type JobTag {
+  id: ID!
+  tagType: String!
+  tagName: String!
 }
 
 type Cluster {
@@ -68,23 +72,22 @@ type JobMetricStatistics {
   max: Float!
 }
 
-type JobTag {
-  id: ID!
-  tagType: String!
-  tagName: String!
+type JobMetricWithName {
+  name: String!
+  metric: JobMetric!
 }
 
 type Query {
   clusters: [Cluster!]!
 
   jobById(id: ID!): Job
   jobs(filter: JobFilterList, page: PageRequest, order: OrderByInput): JobResultList!
   jobsStatistics(filter: JobFilterList): JobsStatistics!
-  jobMetrics(jobId: String!, clusterId: String, startTime: Time, metrics: [String]): [JobMetricWithName]!
+  jobMetrics(jobId: String!, clusterId: String, metrics: [String]): [JobMetricWithName]!
+  jobMetricAverages(filter: JobFilterList!, metrics: [String]!): [[Float]]!
+  rooflineHeatmap(filter: JobFilterList!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]]!
   tags: [JobTag!]!
 
   filterRanges: FilterRanges!
+  userStats(startTime: Time, stopTime: Time, clusterId: String): [UserStats!]!
 }
 
 type Mutation {
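These schema hunks move JobTag and JobMetricWithName, add three query fields (jobMetricAverages, rooflineHeatmap, userStats), and drop the startTime argument from jobMetrics. A minimal Go client sketch that exercises two of the new fields over GraphQL-over-HTTP follows; the endpoint URL, the metric names, and the empty filter are assumptions about a running deployment, not something this commit defines.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Example query against the updated schema. The metric names and the
	// empty filter list are placeholders.
	query := `{
	  jobMetricAverages(filter: {list: []}, metrics: ["flops_any", "mem_bw"])
	  userStats {
	    userId
	    totalJobs
	    totalCoreHours
	  }
	}`

	body, err := json.Marshal(map[string]string{"query": query})
	if err != nil {
		panic(err)
	}

	// Assumed endpoint; adjust host and path to the actual deployment.
	resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	var result struct {
		Data   json.RawMessage `json:"data"`
		Errors json.RawMessage `json:"errors"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("data: %s\nerrors: %s\n", result.Data, result.Errors)
}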
@@ -92,26 +95,8 @@ type Mutation {
   deleteTag(id: ID!): ID!
   addTagsToJob(job: ID!, tagIds: [ID!]!): [JobTag!]!
   removeTagsFromJob(job: ID!, tagIds: [ID!]!): [JobTag!]!
-}
 
-input JobFilterList {
-  list: [JobFilter]
-}
-
-input JobFilter {
-  tags: [ID!]
-  jobId: StringInput
-  userId: StringInput
-  projectId: StringInput
-  clusterId: StringInput
-  duration: IntRange
-  numNodes: IntRange
-  startTime: TimeRange
-  hasProfile: Boolean
-  flopsAnyAvg: FloatRange
-  memBwAvg: FloatRange
-  loadAvg: FloatRange
-  memUsedMax: FloatRange
+  updateConfiguration(name: String!, value: String!): String
 }
 
 type IntRangeOutput {
@@ -130,6 +115,26 @@ type FilterRanges {
   startTime: TimeRangeOutput!
 }
 
+input JobFilterList {
+  list: [JobFilter]
+}
+
+input JobFilter {
+  tags: [ID!]
+  jobId: StringInput
+  userId: StringInput
+  projectId: StringInput
+  clusterId: StringInput
+  duration: IntRange
+  numNodes: IntRange
+  startTime: TimeRange
+  isRunning: Boolean
+  flopsAnyAvg: FloatRange
+  memBwAvg: FloatRange
+  loadAvg: FloatRange
+  memUsedMax: FloatRange
+}
+
 input OrderByInput {
   field: String!
   order: SortDirectionEnum = ASC
@@ -169,11 +174,6 @@ type JobResultList {
   count: Int
 }
 
-type JobMetricWithName {
-  name: String!
-  metric: JobMetric!
-}
-
 type HistoPoint {
   count: Int!
   value: Int!
@@ -188,9 +188,16 @@ type JobsStatistics {
   histNumNodes: [HistoPoint]!
 }
 
+type UserStats {
+  userId: ID!
+  totalJobs: Int!
+  totalWalltime: Float!
+  totalCoreHours: Float!
+}
+
 input PageRequest {
-  itemsPerPage: Int
-  page: Int
+  itemsPerPage: Int!
+  page: Int!
 }
 
 scalar Time

graph/users.go (new file, 13 lines added)

@@ -0,0 +1,13 @@
+package graph
+
+import (
+	"context"
+	"errors"
+	"time"
+
+	"github.com/ClusterCockpit/cc-jobarchive/graph/model"
+)
+
+func (r *queryResolver) UserStats(ctx context.Context, from *time.Time, to *time.Time, clusterId *string) ([]*model.UserStats, error) {
+	return nil, errors.New("unimplemented")
+}
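The new UserStats resolver is also a stub. The UserStats type suggests a per-user aggregation over jobs (job count, total walltime, total core hours). Below is a self-contained sketch of what such an aggregation could look like in memory; the job fields used, the coresPerNode factor, and the hour-based units are assumptions for illustration, not taken from this commit.

package main

import "fmt"

// job holds only the fields needed for the aggregation; the names follow the
// GraphQL Job type (duration in seconds, numNodes).
type job struct {
	userId   string
	duration int // seconds
	numNodes int
}

type userStats struct {
	userId         string
	totalJobs      int
	totalWalltime  float64 // hours, assumed unit
	totalCoreHours float64
}

// aggregate groups jobs by user, the kind of computation a UserStats resolver
// could perform; coresPerNode is a stand-in for real per-cluster hardware info.
func aggregate(jobs []job, coresPerNode int) map[string]*userStats {
	stats := map[string]*userStats{}
	for _, j := range jobs {
		s, ok := stats[j.userId]
		if !ok {
			s = &userStats{userId: j.userId}
			stats[j.userId] = s
		}
		hours := float64(j.duration) / 3600.0
		s.totalJobs++
		s.totalWalltime += hours
		s.totalCoreHours += hours * float64(j.numNodes*coresPerNode)
	}
	return stats
}

func main() {
	jobs := []job{
		{userId: "alice", duration: 7200, numNodes: 2},
		{userId: "alice", duration: 3600, numNodes: 1},
		{userId: "bob", duration: 1800, numNodes: 4},
	}
	for _, s := range aggregate(jobs, 64) {
		fmt.Printf("%s: %d jobs, %.1f h walltime, %.0f core-hours\n",
			s.userId, s.totalJobs, s.totalWalltime, s.totalCoreHours)
	}
}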