New GraphQL schema

Lou Knauer
2021-10-26 10:22:02 +02:00
parent 236f51ba9a
commit b8d23f8ea1
7 changed files with 1328 additions and 1292 deletions


@@ -1,45 +1,9 @@
package model
import (
"time"
)
type Job struct {
ID string `json:"id"`
JobID string `json:"jobId" db:"job_id"`
UserID string `json:"userId" db:"user_id"`
ProjectID string `json:"projectId" db:"project_id"`
ClusterID string `json:"clusterId" db:"cluster_id"`
StartTime time.Time `json:"startTime" db:"start_time"`
Duration int `json:"duration" db:"duration"`
Walltime *int `json:"walltime" db:"walltime"`
Jobstate *string `json:"jobstate" db:"job_state"`
NumNodes int `json:"numNodes" db:"num_nodes"`
NodeList string `json:"nodelist" db:"node_list"`
HasProfile bool `json:"hasProfile" db:"has_profile"`
MemUsedMax *float64 `json:"memUsedMax" db:"mem_used_max"`
FlopsAnyAvg *float64 `json:"flopsAnyAvg" db:"flops_any_avg"`
MemBwAvg *float64 `json:"memBwAvg" db:"mem_bw_avg"`
NetBwAvg *float64 `json:"netBwAvg" db:"net_bw_avg"`
FileBwAvg *float64 `json:"fileBwAvg" db:"file_bw_avg"`
LoadAvg *float64 `json:"loadAvg" db:"load_avg"`
Tags []JobTag `json:"tags"`
}
// Go look at `gqlgen.yml` and the schema package for other non-generated models.
type JobTag struct {
ID string `db:"id"`
TagType string `db:"tag_type"`
TagName string `db:"tag_name"`
}
type Cluster struct {
ClusterID string `json:"clusterID"`
ProcessorType string `json:"processorType"`
SocketsPerNode int `json:"socketsPerNode"`
CoresPerSocket int `json:"coresPerSocket"`
ThreadsPerCore int `json:"threadsPerCore"`
FlopRateScalar int `json:"flopRateScalar"`
FlopRateSimd int `json:"flopRateSimd"`
MemoryBandwidth int `json:"memoryBandwidth"`
MetricConfig []MetricConfig `json:"metricConfig"`
}

type JobTag struct {
ID string `json:"id" db:"id"`
TagType string `json:"tagType" db:"tag_type"`
TagName string `json:"tagName" db:"tag_name"`
}
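The remaining hand-written JobTag model keeps its `db` struct tags, which suggests the rows are filled by an sqlx-style column mapper. A minimal sketch of loading such tags, assuming an sqlx connection, an SQLite driver, and a hypothetical `tag` table (none of which appear in this diff):

package main

import (
	"fmt"
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3"
)

// JobTag mirrors the model above; the db tags map columns to fields.
type JobTag struct {
	ID      string `json:"id" db:"id"`
	TagType string `json:"tagType" db:"tag_type"`
	TagName string `json:"tagName" db:"tag_name"`
}

func main() {
	// Connection string and table name are assumptions for this sketch.
	db, err := sqlx.Open("sqlite3", "./job.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Select uses the db tags (id, tag_type, tag_name) to fill the structs.
	var tags []JobTag
	if err := db.Select(&tags, "SELECT id, tag_type, tag_name FROM tag"); err != nil {
		log.Fatal(err)
	}

	for _, t := range tags {
		fmt.Printf("%s/%s (id %s)\n", t.TagType, t.TagName, t.ID)
	}
}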


@@ -7,8 +7,23 @@ import (
"io"
"strconv"
"time"
"github.com/ClusterCockpit/cc-jobarchive/schema"
)
type Cluster struct {
ClusterID string `json:"clusterID"`
ProcessorType string `json:"processorType"`
SocketsPerNode int `json:"socketsPerNode"`
CoresPerSocket int `json:"coresPerSocket"`
ThreadsPerCore int `json:"threadsPerCore"`
FlopRateScalar int `json:"flopRateScalar"`
FlopRateSimd int `json:"flopRateSimd"`
MemoryBandwidth int `json:"memoryBandwidth"`
MetricConfig []*MetricConfig `json:"metricConfig"`
FilterRanges *FilterRanges `json:"filterRanges"`
}
type FilterRanges struct {
Duration *IntRangeOutput `json:"duration"`
NumNodes *IntRangeOutput `json:"numNodes"`
@@ -35,6 +50,27 @@ type IntRangeOutput struct {
To int `json:"to"`
}
type Job struct {
ID string `json:"id"`
JobID string `json:"jobId"`
UserID string `json:"userId"`
ProjectID string `json:"projectId"`
ClusterID string `json:"clusterId"`
StartTime time.Time `json:"startTime"`
Duration int `json:"duration"`
NumNodes int `json:"numNodes"`
Nodes []string `json:"nodes"`
HasProfile bool `json:"hasProfile"`
State JobState `json:"state"`
Tags []*JobTag `json:"tags"`
LoadAvg *float64 `json:"loadAvg"`
MemUsedMax *float64 `json:"memUsedMax"`
FlopsAnyAvg *float64 `json:"flopsAnyAvg"`
MemBwAvg *float64 `json:"memBwAvg"`
NetBwAvg *float64 `json:"netBwAvg"`
FileBwAvg *float64 `json:"fileBwAvg"`
}
type JobFilter struct {
Tags []string `json:"tags"`
JobID *StringInput `json:"jobId"`
@@ -51,32 +87,9 @@ type JobFilter struct {
MemUsedMax *FloatRange `json:"memUsedMax"`
}
type JobFilterList struct {
List []*JobFilter `json:"list"`
}
type JobMetric struct {
Unit string `json:"unit"`
Scope JobMetricScope `json:"scope"`
Timestep int `json:"timestep"`
Series []*JobMetricSeries `json:"series"`
}
type JobMetricSeries struct {
NodeID string `json:"node_id"`
Statistics *JobMetricStatistics `json:"statistics"`
Data []*float64 `json:"data"`
}
type JobMetricStatistics struct {
Avg float64 `json:"avg"`
Min float64 `json:"min"`
Max float64 `json:"max"`
}
type JobMetricWithName struct {
Name string `json:"name"`
Metric *JobMetric `json:"metric"`
}

type JobMetricWithName struct {
Name string `json:"name"`
Metric *schema.JobMetric `json:"metric"`
}
type JobResultList struct {
@@ -87,6 +100,7 @@ type JobResultList struct {
}
type JobsStatistics struct {
ID string `json:"id"`
TotalJobs int `json:"totalJobs"`
ShortJobs int `json:"shortJobs"`
TotalWalltime int `json:"totalWalltime"`
@@ -105,9 +119,14 @@ type MetricConfig struct {
Alert int `json:"alert"`
}
type MetricFootprints struct {
Name string `json:"name"`
Footprints []schema.Float `json:"footprints"`
}
type OrderByInput struct {
Field string `json:"field"`
Order *SortDirectionEnum `json:"order"`
}

type OrderByInput struct {
Field string `json:"field"`
Order SortDirectionEnum `json:"order"`
}
type PageRequest struct {
@@ -123,8 +142,8 @@ type StringInput struct {
}
type TimeRange struct {
From time.Time `json:"from"`
To time.Time `json:"to"`
}

type TimeRange struct {
From *time.Time `json:"from"`
To *time.Time `json:"to"`
}
type TimeRangeOutput struct {
@@ -132,53 +151,87 @@ type TimeRangeOutput struct {
To time.Time `json:"to"`
}
type UserStats struct {
UserID string `json:"userId"`
TotalJobs int `json:"totalJobs"`
TotalWalltime float64 `json:"totalWalltime"`
TotalCoreHours float64 `json:"totalCoreHours"`
}
type JobMetricScope string

const (
JobMetricScopeNode JobMetricScope = "node"
JobMetricScopeCPU JobMetricScope = "cpu"
JobMetricScopeSocket JobMetricScope = "socket"
)

var AllJobMetricScope = []JobMetricScope{
JobMetricScopeNode,
JobMetricScopeCPU,
JobMetricScopeSocket,
}

func (e JobMetricScope) IsValid() bool {
switch e {
case JobMetricScopeNode, JobMetricScopeCPU, JobMetricScopeSocket:
return true
}
return false
}

func (e JobMetricScope) String() string {
return string(e)
}

func (e *JobMetricScope) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = JobMetricScope(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid JobMetricScope", str)
}
return nil
}

func (e JobMetricScope) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}

type Aggregate string

const (
AggregateUser Aggregate = "USER"
AggregateProject Aggregate = "PROJECT"
AggregateCluster Aggregate = "CLUSTER"
)

var AllAggregate = []Aggregate{
AggregateUser,
AggregateProject,
AggregateCluster,
}

func (e Aggregate) IsValid() bool {
switch e {
case AggregateUser, AggregateProject, AggregateCluster:
return true
}
return false
}

func (e Aggregate) String() string {
return string(e)
}

func (e *Aggregate) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = Aggregate(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid Aggregate", str)
}
return nil
}

func (e Aggregate) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type JobState string
const (
JobStateRunning JobState = "running"
JobStateCompleted JobState = "completed"
)
var AllJobState = []JobState{
JobStateRunning,
JobStateCompleted,
}
func (e JobState) IsValid() bool {
switch e {
case JobStateRunning, JobStateCompleted:
return true
}
return false
}
func (e JobState) String() string {
return string(e)
}
func (e *JobState) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = JobState(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid JobState", str)
}
return nil
}
func (e JobState) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
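The new Aggregate and JobState enums ship with the usual gqlgen marshalling helpers shown above. A small round-trip sketch that uses only those generated methods; the graph/model import path is an assumption based on the module name seen elsewhere in this commit:

package main

import (
	"fmt"
	"os"

	// Path assumed from the cc-jobarchive module name; adjust if the package lives elsewhere.
	"github.com/ClusterCockpit/cc-jobarchive/graph/model"
)

func main() {
	// UnmarshalGQL checks the incoming string against the declared enum values.
	var agg model.Aggregate
	if err := agg.UnmarshalGQL("USER"); err != nil {
		fmt.Println("invalid aggregate:", err)
		return
	}

	var state model.JobState
	if err := state.UnmarshalGQL("running"); err != nil {
		fmt.Println("invalid job state:", err)
		return
	}

	// MarshalGQL writes the quoted GraphQL wire form, e.g. "USER" and "running".
	agg.MarshalGQL(os.Stdout)
	fmt.Println()
	state.MarshalGQL(os.Stdout)
	fmt.Println()

	// An unknown value is rejected by IsValid inside UnmarshalGQL.
	var bad model.JobState
	fmt.Println(bad.UnmarshalGQL("paused")) // paused is not a valid JobState
}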