New GraphQL schema

Lou Knauer 2021-10-26 10:22:02 +02:00
parent 236f51ba9a
commit b8d23f8ea1
7 changed files with 1328 additions and 1292 deletions

View File

@@ -55,15 +55,17 @@ models:
      - github.com/99designs/gqlgen/graphql.Int64
      - github.com/99designs/gqlgen/graphql.Int32
  Job:
+    model: "github.com/ClusterCockpit/cc-jobarchive/graph/model.Job"
    fields:
      tags:
        resolver: true
-  Cluster:
-    fields:
-      filterRanges:
-        resolver: true
-  JobTag:
-    model: "github.com/ClusterCockpit/cc-jobarchive/graph/model.JobTag"
-  Timestamp:
-    model: "github.com/ClusterCockpit/cc-jobarchive/graph/model.Timestamp"
+  JobMetric:
+    model: "github.com/ClusterCockpit/cc-jobarchive/schema.JobMetric"
+  JobMetricSeries:
+    model: "github.com/ClusterCockpit/cc-jobarchive/schema.MetricSeries"
+  JobMetricStatistics:
+    model: "github.com/ClusterCockpit/cc-jobarchive/schema.MetricStatistics"
+  NullableFloat:
+    model: "github.com/ClusterCockpit/cc-jobarchive/schema.Float"
+  JobMetricScope:
+    model: "github.com/ClusterCockpit/cc-jobarchive/schema.MetricScope"

File diff suppressed because it is too large

View File

@@ -1,45 +1,9 @@
 package model
 
-import (
-	"time"
-)
-
-type Job struct {
-	ID          string    `json:"id"`
-	JobID       string    `json:"jobId" db:"job_id"`
-	UserID      string    `json:"userId" db:"user_id"`
-	ProjectID   string    `json:"projectId" db:"project_id"`
-	ClusterID   string    `json:"clusterId" db:"cluster_id"`
-	StartTime   time.Time `json:"startTime" db:"start_time"`
-	Duration    int       `json:"duration" db:"duration"`
-	Walltime    *int      `json:"walltime" db:"walltime"`
-	Jobstate    *string   `json:"jobstate" db:"job_state"`
-	NumNodes    int       `json:"numNodes" db:"num_nodes"`
-	NodeList    string    `json:"nodelist" db:"node_list"`
-	HasProfile  bool      `json:"hasProfile" db:"has_profile"`
-	MemUsedMax  *float64  `json:"memUsedMax" db:"mem_used_max"`
-	FlopsAnyAvg *float64  `json:"flopsAnyAvg" db:"flops_any_avg"`
-	MemBwAvg    *float64  `json:"memBwAvg" db:"mem_bw_avg"`
-	NetBwAvg    *float64  `json:"netBwAvg" db:"net_bw_avg"`
-	FileBwAvg   *float64  `json:"fileBwAvg" db:"file_bw_avg"`
-	LoadAvg     *float64  `json:"loadAvg" db:"load_avg"`
-	Tags        []JobTag  `json:"tags"`
-}
+// Go look at `gqlgen.yml` and the schema package for other non-generated models.
 
 type JobTag struct {
-	ID      string `db:"id"`
-	TagType string `db:"tag_type"`
-	TagName string `db:"tag_name"`
-}
-
-type Cluster struct {
-	ClusterID       string         `json:"clusterID"`
-	ProcessorType   string         `json:"processorType"`
-	SocketsPerNode  int            `json:"socketsPerNode"`
-	CoresPerSocket  int            `json:"coresPerSocket"`
-	ThreadsPerCore  int            `json:"threadsPerCore"`
-	FlopRateScalar  int            `json:"flopRateScalar"`
-	FlopRateSimd    int            `json:"flopRateSimd"`
-	MemoryBandwidth int            `json:"memoryBandwidth"`
-	MetricConfig    []MetricConfig `json:"metricConfig"`
-}
+	ID      string `json:"id" db:"id"`
+	TagType string `json:"tagType" db:"tag_type"`
+	TagName string `json:"tagName" db:"tag_name"`
+}

View File

@@ -7,8 +7,23 @@ import (
 	"io"
 	"strconv"
 	"time"
+
+	"github.com/ClusterCockpit/cc-jobarchive/schema"
 )
 
+type Cluster struct {
+	ClusterID       string          `json:"clusterID"`
+	ProcessorType   string          `json:"processorType"`
+	SocketsPerNode  int             `json:"socketsPerNode"`
+	CoresPerSocket  int             `json:"coresPerSocket"`
+	ThreadsPerCore  int             `json:"threadsPerCore"`
+	FlopRateScalar  int             `json:"flopRateScalar"`
+	FlopRateSimd    int             `json:"flopRateSimd"`
+	MemoryBandwidth int             `json:"memoryBandwidth"`
+	MetricConfig    []*MetricConfig `json:"metricConfig"`
+	FilterRanges    *FilterRanges   `json:"filterRanges"`
+}
+
 type FilterRanges struct {
 	Duration *IntRangeOutput `json:"duration"`
 	NumNodes *IntRangeOutput `json:"numNodes"`
@@ -35,6 +50,27 @@ type IntRangeOutput struct {
 	To   int `json:"to"`
 }
 
+type Job struct {
+	ID          string    `json:"id"`
+	JobID       string    `json:"jobId"`
+	UserID      string    `json:"userId"`
+	ProjectID   string    `json:"projectId"`
+	ClusterID   string    `json:"clusterId"`
+	StartTime   time.Time `json:"startTime"`
+	Duration    int       `json:"duration"`
+	NumNodes    int       `json:"numNodes"`
+	Nodes       []string  `json:"nodes"`
+	HasProfile  bool      `json:"hasProfile"`
+	State       JobState  `json:"state"`
+	Tags        []*JobTag `json:"tags"`
+	LoadAvg     *float64  `json:"loadAvg"`
+	MemUsedMax  *float64  `json:"memUsedMax"`
+	FlopsAnyAvg *float64  `json:"flopsAnyAvg"`
+	MemBwAvg    *float64  `json:"memBwAvg"`
+	NetBwAvg    *float64  `json:"netBwAvg"`
+	FileBwAvg   *float64  `json:"fileBwAvg"`
+}
+
 type JobFilter struct {
 	Tags  []string     `json:"tags"`
 	JobID *StringInput `json:"jobId"`
@@ -51,32 +87,9 @@ type JobFilter struct {
 	MemUsedMax *FloatRange `json:"memUsedMax"`
 }
 
-type JobFilterList struct {
-	List []*JobFilter `json:"list"`
-}
-
-type JobMetric struct {
-	Unit     string             `json:"unit"`
-	Scope    JobMetricScope     `json:"scope"`
-	Timestep int                `json:"timestep"`
-	Series   []*JobMetricSeries `json:"series"`
-}
-
-type JobMetricSeries struct {
-	NodeID     string               `json:"node_id"`
-	Statistics *JobMetricStatistics `json:"statistics"`
-	Data       []*float64           `json:"data"`
-}
-
-type JobMetricStatistics struct {
-	Avg float64 `json:"avg"`
-	Min float64 `json:"min"`
-	Max float64 `json:"max"`
-}
-
 type JobMetricWithName struct {
-	Name   string     `json:"name"`
-	Metric *JobMetric `json:"metric"`
+	Name   string            `json:"name"`
+	Metric *schema.JobMetric `json:"metric"`
 }
 
 type JobResultList struct {
@@ -87,6 +100,7 @@ type JobResultList struct {
 }
 
 type JobsStatistics struct {
+	ID             string `json:"id"`
 	TotalJobs      int    `json:"totalJobs"`
 	ShortJobs      int    `json:"shortJobs"`
 	TotalWalltime  int    `json:"totalWalltime"`
@@ -105,9 +119,14 @@ type MetricConfig struct {
 	Alert int `json:"alert"`
 }
 
+type MetricFootprints struct {
+	Name       string         `json:"name"`
+	Footprints []schema.Float `json:"footprints"`
+}
+
 type OrderByInput struct {
-	Field string             `json:"field"`
-	Order *SortDirectionEnum `json:"order"`
+	Field string            `json:"field"`
+	Order SortDirectionEnum `json:"order"`
 }
 
 type PageRequest struct {
@@ -123,8 +142,8 @@ type StringInput struct {
 }
 
 type TimeRange struct {
-	From time.Time `json:"from"`
-	To   time.Time `json:"to"`
+	From *time.Time `json:"from"`
+	To   *time.Time `json:"to"`
 }
 
 type TimeRangeOutput struct {
@@ -132,53 +151,87 @@ type TimeRangeOutput struct {
 	To   time.Time `json:"to"`
 }
 
-type UserStats struct {
-	UserID         string  `json:"userId"`
-	TotalJobs      int     `json:"totalJobs"`
-	TotalWalltime  float64 `json:"totalWalltime"`
-	TotalCoreHours float64 `json:"totalCoreHours"`
-}
-
-type JobMetricScope string
+type Aggregate string
 
 const (
-	JobMetricScopeNode   JobMetricScope = "node"
-	JobMetricScopeCPU    JobMetricScope = "cpu"
-	JobMetricScopeSocket JobMetricScope = "socket"
+	AggregateUser    Aggregate = "USER"
+	AggregateProject Aggregate = "PROJECT"
+	AggregateCluster Aggregate = "CLUSTER"
 )
 
-var AllJobMetricScope = []JobMetricScope{
-	JobMetricScopeNode,
-	JobMetricScopeCPU,
-	JobMetricScopeSocket,
+var AllAggregate = []Aggregate{
+	AggregateUser,
+	AggregateProject,
+	AggregateCluster,
 }
 
-func (e JobMetricScope) IsValid() bool {
+func (e Aggregate) IsValid() bool {
 	switch e {
-	case JobMetricScopeNode, JobMetricScopeCPU, JobMetricScopeSocket:
+	case AggregateUser, AggregateProject, AggregateCluster:
 		return true
 	}
 	return false
 }
 
-func (e JobMetricScope) String() string {
+func (e Aggregate) String() string {
 	return string(e)
 }
 
-func (e *JobMetricScope) UnmarshalGQL(v interface{}) error {
+func (e *Aggregate) UnmarshalGQL(v interface{}) error {
 	str, ok := v.(string)
 	if !ok {
 		return fmt.Errorf("enums must be strings")
 	}
-	*e = JobMetricScope(str)
+	*e = Aggregate(str)
 	if !e.IsValid() {
-		return fmt.Errorf("%s is not a valid JobMetricScope", str)
+		return fmt.Errorf("%s is not a valid Aggregate", str)
 	}
 	return nil
 }
 
-func (e JobMetricScope) MarshalGQL(w io.Writer) {
+func (e Aggregate) MarshalGQL(w io.Writer) {
+	fmt.Fprint(w, strconv.Quote(e.String()))
+}
+
+type JobState string
+
+const (
+	JobStateRunning   JobState = "running"
+	JobStateCompleted JobState = "completed"
+)
+
+var AllJobState = []JobState{
+	JobStateRunning,
+	JobStateCompleted,
+}
+
+func (e JobState) IsValid() bool {
+	switch e {
+	case JobStateRunning, JobStateCompleted:
+		return true
+	}
+	return false
+}
+
+func (e JobState) String() string {
+	return string(e)
+}
+
+func (e *JobState) UnmarshalGQL(v interface{}) error {
+	str, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("enums must be strings")
+	}
+	*e = JobState(str)
+	if !e.IsValid() {
+		return fmt.Errorf("%s is not a valid JobState", str)
+	}
+	return nil
+}
+
+func (e JobState) MarshalGQL(w io.Writer) {
 	fmt.Fprint(w, strconv.Quote(e.String()))
 }

View File

@@ -1,15 +1,18 @@
 type Job {
-  id:        ID!
-  jobId:     String!
-  userId:    String!
-  projectId: String!
-  clusterId: String!
-  startTime: Time!
-  duration:  Int!
-  numNodes:  Int!
-  hasProfile: Boolean!
-  tags: [JobTag!]!
+  id:         ID!        # Database ID, unique
+  jobId:      String!    # ID given to the job by the cluster scheduler
+  userId:     String!    # Username
+  projectId:  String!    # Project
+  clusterId:  String!    # Name of the cluster this job was running on
+  startTime:  Time!      # RFC3339 formatted string
+  duration:   Int!       # For running jobs, the time it has already run
+  numNodes:   Int!       # Number of nodes this job was running on
+  nodes:      [String!]! # List of hostnames
+  hasProfile: Boolean!   # TODO: Could be removed?
+  state:      JobState!  # State of the job
+  tags:       [JobTag!]! # List of tags this job has
 
+  # Will be null for running jobs.
   loadAvg:     Float
   memUsedMax:  Float
   flopsAnyAvg: Float
@@ -18,10 +21,16 @@ type Job {
   fileBwAvg:   Float
 }
 
+# TODO: Extend by more possible states?
+enum JobState {
+  running
+  completed
+}
+
 type JobTag {
-  id: ID!
-  tagType: String!
-  tagName: String!
+  id:      ID!     # Database ID, unique
+  tagType: String! # Type
+  tagName: String! # Name
 }
 
 type Cluster {
@@ -51,19 +60,13 @@ type JobMetric {
   unit:     String!
   scope:    JobMetricScope!
   timestep: Int!
-  series: [JobMetricSeries]!
-}
-
-enum JobMetricScope {
-  node
-  cpu
-  socket
+  series:   [JobMetricSeries!]!
 }
 
 type JobMetricSeries {
   node_id: String!
   statistics: JobMetricStatistics
-  data: [Float]!
+  data: [NullableFloat!]!
 }
 
 type JobMetricStatistics {
@@ -77,17 +80,25 @@ type JobMetricWithName {
   metric: JobMetric!
 }
 
+type MetricFootprints {
+  name:       String!
+  footprints: [NullableFloat!]!
+}
+
+enum Aggregate { USER, PROJECT, CLUSTER }
+
 type Query {
-  clusters: [Cluster!]!
-  jobById(id: ID!): Job
-  jobs(filter: JobFilterList, page: PageRequest, order: OrderByInput): JobResultList!
-  jobsStatistics(filter: JobFilterList): JobsStatistics!
-  jobMetrics(jobId: String!, clusterId: String, metrics: [String]): [JobMetricWithName]!
-  jobMetricAverages(filter: JobFilterList!, metrics: [String]!): [[Float]]!
-  rooflineHeatmap(filter: JobFilterList!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]]!
-  tags: [JobTag!]!
-  filterRanges: FilterRanges!
-  userStats(startTime: Time, stopTime: Time, clusterId: String): [UserStats!]!
+  clusters: [Cluster!]! # List of all clusters
+  tags:     [JobTag!]!  # List of all tags
+
+  job(id: ID!): Job
+  jobMetrics(id: ID!, metrics: [String!]): [JobMetricWithName!]!
+  jobsFootprints(filter: [JobFilter!], metrics: [String!]!): [MetricFootprints]!
+
+  jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
+  jobsStatistics(filter: [JobFilter!], groupBy: Aggregate): [JobsStatistics!]!
+
+  rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!
 }
 
 type Mutation {
@@ -115,10 +126,6 @@ type FilterRanges {
   startTime: TimeRangeOutput!
 }
 
-input JobFilterList {
-  list: [JobFilter]
-}
-
 input JobFilter {
   tags: [ID!]
   jobId: StringInput
@@ -137,7 +144,7 @@ input JobFilter {
 
 input OrderByInput {
   field: String!
-  order: SortDirectionEnum = ASC
+  order: SortDirectionEnum! = ASC
 }
 
 enum SortDirectionEnum {
@@ -163,12 +170,12 @@ input FloatRange {
 }
 
 input TimeRange {
-  from: Time!
-  to:   Time!
+  from: Time
+  to:   Time
 }
 
 type JobResultList {
-  items: [Job]!
+  items: [Job!]!
   offset: Int
   limit: Int
   count: Int
@@ -180,19 +187,13 @@ type HistoPoint {
 }
 
 type JobsStatistics {
-  totalJobs: Int!
-  shortJobs: Int!
-  totalWalltime: Int!
-  totalCoreHours: Int!
-  histWalltime: [HistoPoint]!
-  histNumNodes: [HistoPoint]!
-}
-
-type UserStats {
-  userId: ID!
-  totalJobs: Int!
-  totalWalltime: Float!
-  totalCoreHours: Float!
+  id:             ID!            # If `groupBy` was used, ID of the user/project/cluster
+  totalJobs:      Int!           # Number of jobs that matched
+  shortJobs:      Int!           # Number of jobs with a duration of less than 2 minutes
+  totalWalltime:  Int!           # Sum of the duration of all matched jobs in hours
+  totalCoreHours: Int!           # Sum of the core hours of all matched jobs
+  histWalltime:   [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
+  histNumNodes:   [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
 }
 
 input PageRequest {
@@ -201,3 +202,5 @@ input PageRequest {
 }
 
 scalar Time
+scalar NullableFloat
+scalar JobMetricScope
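
As a usage sketch (not part of this commit): filters are now passed as plain lists ([JobFilter!]) instead of the removed JobFilterList wrapper, and jobsStatistics can be grouped via the new Aggregate enum. A minimal Go client might call the reworked Query root like this; the server address and the "/query" endpoint path are assumptions, while the query itself only uses fields present in the schema above:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Only fields confirmed by the new schema: clusters, and
	// jobsStatistics grouped per user.
	query := `{
		clusters { clusterId }
		jobsStatistics(groupBy: USER) { id totalJobs totalCoreHours }
	}`

	body, err := json.Marshal(map[string]string{"query": query})
	if err != nil {
		panic(err)
	}

	// "http://localhost:8080/query" is a placeholder endpoint (assumption),
	// not something defined by this commit.
	resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var result struct {
		Data map[string]interface{} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}
	fmt.Println(result.Data)
}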

schema/float.go Normal file
View File

@@ -0,0 +1,66 @@
package schema

import (
	"errors"
	"io"
	"math"
	"strconv"
)

// A custom float type is used so that (Un)MarshalJSON and
// (Un)MarshalGQL can be overloaded and NaN/null can be used.
// The default behaviour of putting every nullable value behind
// a pointer has a bigger overhead.
type Float float64

var NaN Float = Float(math.NaN())

func (f Float) IsNaN() bool {
	return math.IsNaN(float64(f))
}

// NaN will be serialized to `null`.
func (f Float) MarshalJSON() ([]byte, error) {
	if f.IsNaN() {
		return []byte("null"), nil
	}
	return []byte(strconv.FormatFloat(float64(f), 'f', 2, 64)), nil
}

// `null` will be deserialized to NaN.
func (f *Float) UnmarshalJSON(input []byte) error {
	s := string(input)
	if s == "null" {
		*f = NaN
		return nil
	}

	val, err := strconv.ParseFloat(s, 64)
	if err != nil {
		return err
	}
	*f = Float(val)
	return nil
}

// UnmarshalGQL implements the graphql.Unmarshaler interface.
func (f *Float) UnmarshalGQL(v interface{}) error {
	f64, ok := v.(float64)
	if !ok {
		return errors.New("invalid Float scalar")
	}

	*f = Float(f64)
	return nil
}

// MarshalGQL implements the graphql.Marshaler interface.
// NaN will be serialized to `null`.
func (f Float) MarshalGQL(w io.Writer) {
	if f.IsNaN() {
		w.Write([]byte(`null`))
	} else {
		w.Write([]byte(strconv.FormatFloat(float64(f), 'f', 2, 64)))
	}
}
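
A quick sketch of the behaviour this buys (illustrative, assuming the schema package is importable as below): NaN round-trips through JSON as `null`, so nullable series data can stay a dense []Float instead of []*float64:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/ClusterCockpit/cc-jobarchive/schema"
)

func main() {
	// A plain []float64 containing NaN would make json.Marshal fail;
	// schema.Float serializes NaN as `null` instead.
	data := []schema.Float{1.5, schema.NaN, 3.0}
	out, err := json.Marshal(data)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // [1.50,null,3.00]

	// And `null` comes back as NaN, keeping the slice dense.
	var back []schema.Float
	if err := json.Unmarshal(out, &back); err != nil {
		panic(err)
	}
	fmt.Println(back[1].IsNaN()) // true
}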

schema/metrics.go Normal file
View File

@@ -0,0 +1,76 @@
package schema

import (
	"fmt"
	"io"
)

// Format of `data.json` files.
type JobData map[string]*JobMetric

type JobMetric struct {
	Unit     string          `json:"unit"`
	Scope    MetricScope     `json:"scope"`
	Timestep int             `json:"timestep"`
	Series   []*MetricSeries `json:"series"`
}

type MetricScope string

const (
	MetricScopeNode   MetricScope = "node"
	MetricScopeSocket MetricScope = "socket"
	MetricScopeCpu    MetricScope = "cpu"
)

func (e *MetricScope) UnmarshalGQL(v interface{}) error {
	str, ok := v.(string)
	if !ok {
		return fmt.Errorf("enums must be strings")
	}

	*e = MetricScope(str)
	if *e != "node" && *e != "socket" && *e != "cpu" {
		return fmt.Errorf("%s is not a valid MetricScope", str)
	}
	return nil
}

func (e MetricScope) MarshalGQL(w io.Writer) {
	fmt.Fprintf(w, "\"%s\"", e)
}

type MetricStatistics struct {
	Avg float64 `json:"avg"`
	Min float64 `json:"min"`
	Max float64 `json:"max"`
}

type MetricSeries struct {
	NodeID     string            `json:"node_id"`
	Statistics *MetricStatistics `json:"statistics"`
	Data       []Float           `json:"data"`
}

// Format of `meta.json` files.
type JobMeta struct {
	JobId     string   `json:"job_id"`
	UserId    string   `json:"user_id"`
	ProjectId string   `json:"project_id"`
	ClusterId string   `json:"cluster_id"`
	NumNodes  int      `json:"num_nodes"`
	JobState  string   `json:"job_state"`
	StartTime int64    `json:"start_time"`
	Duration  int64    `json:"duration"`
	Nodes     []string `json:"nodes"`
	Tags      []struct {
		Name string `json:"name"`
		Type string `json:"type"`
	} `json:"tags"`
	Statistics map[string]struct {
		Unit string  `json:"unit"`
		Avg  float64 `json:"avg"`
		Min  float64 `json:"min"`
		Max  float64 `json:"max"`
	} `json:"statistics"`
}
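
To illustrate the intended shape, a sketch that decodes a hand-written `data.json` fragment into JobData; the metric name, node ID, and values below are made up, and `meta.json` decodes analogously into JobMeta:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/ClusterCockpit/cc-jobarchive/schema"
)

// A trimmed-down, hypothetical example of a `data.json` payload.
const rawData = `{
	"flops_any": {
		"unit": "GF/s",
		"scope": "node",
		"timestep": 60,
		"series": [{
			"node_id": "node001",
			"statistics": { "avg": 2.5, "min": 0.0, "max": 5.0 },
			"data": [0.0, null, 5.0]
		}]
	}
}`

func main() {
	var jobData schema.JobData
	if err := json.Unmarshal([]byte(rawData), &jobData); err != nil {
		panic(err)
	}

	series := jobData["flops_any"].Series[0]
	// The `null` sample arrives as NaN thanks to schema.Float.
	fmt.Println(series.NodeID, series.Data[1].IsNaN()) // node001 true
}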