Merge branch 'dev' into metricstore

This commit is contained in:
Jan Eitzinger
2025-09-10 09:14:50 +02:00
committed by GitHub
26 changed files with 2836 additions and 945 deletions

View File

@@ -12,12 +12,13 @@ type Node {
hostname: String! hostname: String!
cluster: String! cluster: String!
subCluster: String! subCluster: String!
runningJobs: Int!
nodeState: NodeState! nodeState: NodeState!
HealthState: MonitoringState! healthState: MonitoringState!
metaData: Any metaData: Any
} }
type NodeStats { type NodeStates {
state: String! state: String!
count: Int! count: Int!
} }
@@ -236,10 +237,12 @@ enum Aggregate {
USER USER
PROJECT PROJECT
CLUSTER CLUSTER
SUBCLUSTER
} }
enum SortByAggregate { enum SortByAggregate {
TOTALWALLTIME TOTALWALLTIME
TOTALJOBS TOTALJOBS
TOTALUSERS
TOTALNODES TOTALNODES
TOTALNODEHOURS TOTALNODEHOURS
TOTALCORES TOTALCORES
@@ -300,9 +303,10 @@ type Query {
user(username: String!): User user(username: String!): User
allocatedNodes(cluster: String!): [Count!]! allocatedNodes(cluster: String!): [Count!]!
## Node Queries New
node(id: ID!): Node node(id: ID!): Node
nodes(filter: [NodeFilter!], order: OrderByInput): NodeStateResultList! nodes(filter: [NodeFilter!], order: OrderByInput): NodeStateResultList!
nodeStats(filter: [NodeFilter!]): [NodeStats!]! nodeStates(filter: [NodeFilter!]): [NodeStates!]!
job(id: ID!): Job job(id: ID!): Job
jobMetrics( jobMetrics(
@@ -357,6 +361,7 @@ type Query {
from: Time! from: Time!
to: Time! to: Time!
): [NodeMetrics!]! ): [NodeMetrics!]!
nodeMetricsList( nodeMetricsList(
cluster: String! cluster: String!
subCluster: String! subCluster: String!
@@ -393,6 +398,7 @@ type TimeRangeOutput {
input NodeFilter { input NodeFilter {
hostname: StringInput hostname: StringInput
cluster: StringInput cluster: StringInput
subcluster: StringInput
nodeState: NodeState nodeState: NodeState
healthState: MonitoringState healthState: MonitoringState
} }
@@ -497,11 +503,12 @@ type MetricHistoPoint {
} }
type JobsStatistics { type JobsStatistics {
id: ID! # If `groupBy` was used, ID of the user/project/cluster id: ID! # If `groupBy` was used, ID of the user/project/cluster/subcluster
name: String! # if User-Statistics: Given Name of Account (ID) Owner name: String! # if User-Statistics: Given Name of Account (ID) Owner
totalUsers: Int! # if *not* User-Statistics: Number of active users (based on running jobs)
totalJobs: Int! # Number of jobs totalJobs: Int! # Number of jobs
runningJobs: Int! # Number of running jobs runningJobs: Int! # Number of running jobs
shortJobs: Int! # Number of jobs with a duration of less than duration shortJobs: Int! # Number of jobs with a duration of less than config'd ShortRunningJobsDuration
totalWalltime: Int! # Sum of the duration of all matched jobs in hours totalWalltime: Int! # Sum of the duration of all matched jobs in hours
totalNodes: Int! # Sum of the nodes of all matched jobs totalNodes: Int! # Sum of the nodes of all matched jobs
totalNodeHours: Int! # Sum of the node hours of all matched jobs totalNodeHours: Int! # Sum of the node hours of all matched jobs

View File

@@ -113,6 +113,7 @@ func (api *RestApi) getJobs(rw http.ResponseWriter, r *http.Request) {
for key, vals := range r.URL.Query() { for key, vals := range r.URL.Query() {
switch key { switch key {
// TODO: add project filter
case "state": case "state":
for _, s := range vals { for _, s := range vals {
state := schema.JobState(s) state := schema.JobState(s)
@@ -125,7 +126,7 @@ func (api *RestApi) getJobs(rw http.ResponseWriter, r *http.Request) {
} }
case "cluster": case "cluster":
filter.Cluster = &model.StringInput{Eq: &vals[0]} filter.Cluster = &model.StringInput{Eq: &vals[0]}
case "start-time": case "start-time": // ?startTime=1753707480-1754053139
st := strings.Split(vals[0], "-") st := strings.Split(vals[0], "-")
if len(st) != 2 { if len(st) != 2 {
handleError(fmt.Errorf("invalid query parameter value: startTime"), handleError(fmt.Errorf("invalid query parameter value: startTime"),

View File

@@ -402,7 +402,7 @@ func (auth *Authentication) AuthUserApi(
return return
} }
case len(user.Roles) >= 2: case len(user.Roles) >= 2:
if user.HasRole(schema.RoleApi) && user.HasAnyRole([]schema.Role{schema.RoleUser, schema.RoleManager, schema.RoleAdmin}) { if user.HasRole(schema.RoleApi) && user.HasAnyRole([]schema.Role{schema.RoleUser, schema.RoleManager, schema.RoleSupport, schema.RoleAdmin}) {
ctx := context.WithValue(r.Context(), repository.ContextUserKey, user) ctx := context.WithValue(r.Context(), repository.ContextUserKey, user)
onsuccess.ServeHTTP(rw, r.WithContext(ctx)) onsuccess.ServeHTTP(rw, r.WithContext(ctx))
return return
@@ -530,6 +530,7 @@ func securedCheck(user *schema.User, r *http.Request) error {
IPAddress = r.RemoteAddr IPAddress = r.RemoteAddr
} }
// FIXME: IPV6 not handled
if strings.Contains(IPAddress, ":") { if strings.Contains(IPAddress, ":") {
IPAddress = strings.Split(IPAddress, ":")[0] IPAddress = strings.Split(IPAddress, ":")[0]
} }

View File

@@ -202,6 +202,7 @@ type ComplexityRoot struct {
TotalJobs func(childComplexity int) int TotalJobs func(childComplexity int) int
TotalNodeHours func(childComplexity int) int TotalNodeHours func(childComplexity int) int
TotalNodes func(childComplexity int) int TotalNodes func(childComplexity int) int
TotalUsers func(childComplexity int) int
TotalWalltime func(childComplexity int) int TotalWalltime func(childComplexity int) int
} }
@@ -277,6 +278,7 @@ type ComplexityRoot struct {
ID func(childComplexity int) int ID func(childComplexity int) int
MetaData func(childComplexity int) int MetaData func(childComplexity int) int
NodeState func(childComplexity int) int NodeState func(childComplexity int) int
RunningJobs func(childComplexity int) int
SubCluster func(childComplexity int) int SubCluster func(childComplexity int) int
} }
@@ -291,7 +293,7 @@ type ComplexityRoot struct {
Items func(childComplexity int) int Items func(childComplexity int) int
} }
NodeStats struct { NodeStates struct {
Count func(childComplexity int) int Count func(childComplexity int) int
State func(childComplexity int) int State func(childComplexity int) int
} }
@@ -319,7 +321,7 @@ type ComplexityRoot struct {
Node func(childComplexity int, id string) int Node func(childComplexity int, id string) int
NodeMetrics func(childComplexity int, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) int NodeMetrics func(childComplexity int, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) int
NodeMetricsList func(childComplexity int, cluster string, subCluster string, nodeFilter string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time, page *model.PageRequest, resolution *int) int NodeMetricsList func(childComplexity int, cluster string, subCluster string, nodeFilter string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time, page *model.PageRequest, resolution *int) int
NodeStats func(childComplexity int, filter []*model.NodeFilter) int NodeStates func(childComplexity int, filter []*model.NodeFilter) int
Nodes func(childComplexity int, filter []*model.NodeFilter, order *model.OrderByInput) int Nodes func(childComplexity int, filter []*model.NodeFilter, order *model.OrderByInput) int
RooflineHeatmap func(childComplexity int, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) int RooflineHeatmap func(childComplexity int, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) int
ScopedJobStats func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope) int ScopedJobStats func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope) int
@@ -445,6 +447,7 @@ type MutationResolver interface {
UpdateConfiguration(ctx context.Context, name string, value string) (*string, error) UpdateConfiguration(ctx context.Context, name string, value string) (*string, error)
} }
type NodeResolver interface { type NodeResolver interface {
RunningJobs(ctx context.Context, obj *schema.Node) (int, error)
NodeState(ctx context.Context, obj *schema.Node) (string, error) NodeState(ctx context.Context, obj *schema.Node) (string, error)
HealthState(ctx context.Context, obj *schema.Node) (schema.NodeState, error) HealthState(ctx context.Context, obj *schema.Node) (schema.NodeState, error)
MetaData(ctx context.Context, obj *schema.Node) (any, error) MetaData(ctx context.Context, obj *schema.Node) (any, error)
@@ -457,7 +460,7 @@ type QueryResolver interface {
AllocatedNodes(ctx context.Context, cluster string) ([]*model.Count, error) AllocatedNodes(ctx context.Context, cluster string) ([]*model.Count, error)
Node(ctx context.Context, id string) (*schema.Node, error) Node(ctx context.Context, id string) (*schema.Node, error)
Nodes(ctx context.Context, filter []*model.NodeFilter, order *model.OrderByInput) (*model.NodeStateResultList, error) Nodes(ctx context.Context, filter []*model.NodeFilter, order *model.OrderByInput) (*model.NodeStateResultList, error)
NodeStats(ctx context.Context, filter []*model.NodeFilter) ([]*model.NodeStats, error) NodeStates(ctx context.Context, filter []*model.NodeFilter) ([]*model.NodeStates, error)
Job(ctx context.Context, id string) (*schema.Job, error) Job(ctx context.Context, id string) (*schema.Job, error)
JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope, resolution *int) ([]*model.JobMetricWithName, error) JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope, resolution *int) ([]*model.JobMetricWithName, error)
JobStats(ctx context.Context, id string, metrics []string) ([]*model.NamedStats, error) JobStats(ctx context.Context, id string, metrics []string) ([]*model.NamedStats, error)
@@ -1165,6 +1168,13 @@ func (e *executableSchema) Complexity(ctx context.Context, typeName, field strin
return e.complexity.JobsStatistics.TotalNodes(childComplexity), true return e.complexity.JobsStatistics.TotalNodes(childComplexity), true
case "JobsStatistics.totalUsers":
if e.complexity.JobsStatistics.TotalUsers == nil {
break
}
return e.complexity.JobsStatistics.TotalUsers(childComplexity), true
case "JobsStatistics.totalWalltime": case "JobsStatistics.totalWalltime":
if e.complexity.JobsStatistics.TotalWalltime == nil { if e.complexity.JobsStatistics.TotalWalltime == nil {
break break
@@ -1475,7 +1485,7 @@ func (e *executableSchema) Complexity(ctx context.Context, typeName, field strin
return e.complexity.Node.Cluster(childComplexity), true return e.complexity.Node.Cluster(childComplexity), true
case "Node.HealthState": case "Node.healthState":
if e.complexity.Node.HealthState == nil { if e.complexity.Node.HealthState == nil {
break break
} }
@@ -1510,6 +1520,13 @@ func (e *executableSchema) Complexity(ctx context.Context, typeName, field strin
return e.complexity.Node.NodeState(childComplexity), true return e.complexity.Node.NodeState(childComplexity), true
case "Node.runningJobs":
if e.complexity.Node.RunningJobs == nil {
break
}
return e.complexity.Node.RunningJobs(childComplexity), true
case "Node.subCluster": case "Node.subCluster":
if e.complexity.Node.SubCluster == nil { if e.complexity.Node.SubCluster == nil {
break break
@@ -1552,19 +1569,19 @@ func (e *executableSchema) Complexity(ctx context.Context, typeName, field strin
return e.complexity.NodeStateResultList.Items(childComplexity), true return e.complexity.NodeStateResultList.Items(childComplexity), true
case "NodeStats.count": case "NodeStates.count":
if e.complexity.NodeStats.Count == nil { if e.complexity.NodeStates.Count == nil {
break break
} }
return e.complexity.NodeStats.Count(childComplexity), true return e.complexity.NodeStates.Count(childComplexity), true
case "NodeStats.state": case "NodeStates.state":
if e.complexity.NodeStats.State == nil { if e.complexity.NodeStates.State == nil {
break break
} }
return e.complexity.NodeStats.State(childComplexity), true return e.complexity.NodeStates.State(childComplexity), true
case "NodesResultList.count": case "NodesResultList.count":
if e.complexity.NodesResultList.Count == nil { if e.complexity.NodesResultList.Count == nil {
@@ -1754,17 +1771,17 @@ func (e *executableSchema) Complexity(ctx context.Context, typeName, field strin
return e.complexity.Query.NodeMetricsList(childComplexity, args["cluster"].(string), args["subCluster"].(string), args["nodeFilter"].(string), args["scopes"].([]schema.MetricScope), args["metrics"].([]string), args["from"].(time.Time), args["to"].(time.Time), args["page"].(*model.PageRequest), args["resolution"].(*int)), true return e.complexity.Query.NodeMetricsList(childComplexity, args["cluster"].(string), args["subCluster"].(string), args["nodeFilter"].(string), args["scopes"].([]schema.MetricScope), args["metrics"].([]string), args["from"].(time.Time), args["to"].(time.Time), args["page"].(*model.PageRequest), args["resolution"].(*int)), true
case "Query.nodeStats": case "Query.nodeStates":
if e.complexity.Query.NodeStats == nil { if e.complexity.Query.NodeStates == nil {
break break
} }
args, err := ec.field_Query_nodeStats_args(ctx, rawArgs) args, err := ec.field_Query_nodeStates_args(ctx, rawArgs)
if err != nil { if err != nil {
return 0, false return 0, false
} }
return e.complexity.Query.NodeStats(childComplexity, args["filter"].([]*model.NodeFilter)), true return e.complexity.Query.NodeStates(childComplexity, args["filter"].([]*model.NodeFilter)), true
case "Query.nodes": case "Query.nodes":
if e.complexity.Query.Nodes == nil { if e.complexity.Query.Nodes == nil {
@@ -2334,12 +2351,13 @@ type Node {
hostname: String! hostname: String!
cluster: String! cluster: String!
subCluster: String! subCluster: String!
runningJobs: Int!
nodeState: NodeState! nodeState: NodeState!
HealthState: MonitoringState! healthState: MonitoringState!
metaData: Any metaData: Any
} }
type NodeStats { type NodeStates {
state: String! state: String!
count: Int! count: Int!
} }
@@ -2558,10 +2576,12 @@ enum Aggregate {
USER USER
PROJECT PROJECT
CLUSTER CLUSTER
SUBCLUSTER
} }
enum SortByAggregate { enum SortByAggregate {
TOTALWALLTIME TOTALWALLTIME
TOTALJOBS TOTALJOBS
TOTALUSERS
TOTALNODES TOTALNODES
TOTALNODEHOURS TOTALNODEHOURS
TOTALCORES TOTALCORES
@@ -2622,9 +2642,10 @@ type Query {
user(username: String!): User user(username: String!): User
allocatedNodes(cluster: String!): [Count!]! allocatedNodes(cluster: String!): [Count!]!
## Node Queries New
node(id: ID!): Node node(id: ID!): Node
nodes(filter: [NodeFilter!], order: OrderByInput): NodeStateResultList! nodes(filter: [NodeFilter!], order: OrderByInput): NodeStateResultList!
nodeStats(filter: [NodeFilter!]): [NodeStats!]! nodeStates(filter: [NodeFilter!]): [NodeStates!]!
job(id: ID!): Job job(id: ID!): Job
jobMetrics( jobMetrics(
@@ -2679,6 +2700,7 @@ type Query {
from: Time! from: Time!
to: Time! to: Time!
): [NodeMetrics!]! ): [NodeMetrics!]!
nodeMetricsList( nodeMetricsList(
cluster: String! cluster: String!
subCluster: String! subCluster: String!
@@ -2715,6 +2737,7 @@ type TimeRangeOutput {
input NodeFilter { input NodeFilter {
hostname: StringInput hostname: StringInput
cluster: StringInput cluster: StringInput
subcluster: StringInput
nodeState: NodeState nodeState: NodeState
healthState: MonitoringState healthState: MonitoringState
} }
@@ -2819,11 +2842,12 @@ type MetricHistoPoint {
} }
type JobsStatistics { type JobsStatistics {
id: ID! # If ` + "`" + `groupBy` + "`" + ` was used, ID of the user/project/cluster id: ID! # If ` + "`" + `groupBy` + "`" + ` was used, ID of the user/project/cluster/subcluster
name: String! # if User-Statistics: Given Name of Account (ID) Owner name: String! # if User-Statistics: Given Name of Account (ID) Owner
totalUsers: Int! # if *not* User-Statistics: Number of active users (based on running jobs)
totalJobs: Int! # Number of jobs totalJobs: Int! # Number of jobs
runningJobs: Int! # Number of running jobs runningJobs: Int! # Number of running jobs
shortJobs: Int! # Number of jobs with a duration of less than duration shortJobs: Int! # Number of jobs with a duration of less than config'd ShortRunningJobsDuration
totalWalltime: Int! # Sum of the duration of all matched jobs in hours totalWalltime: Int! # Sum of the duration of all matched jobs in hours
totalNodes: Int! # Sum of the nodes of all matched jobs totalNodes: Int! # Sum of the nodes of all matched jobs
totalNodeHours: Int! # Sum of the node hours of all matched jobs totalNodeHours: Int! # Sum of the node hours of all matched jobs
@@ -3197,7 +3221,7 @@ func (ec *executionContext) field_Query_nodeMetrics_args(ctx context.Context, ra
return args, nil return args, nil
} }
func (ec *executionContext) field_Query_nodeStats_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { func (ec *executionContext) field_Query_nodeStates_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) {
var err error var err error
args := map[string]any{} args := map[string]any{}
arg0, err := graphql.ProcessArgField(ctx, rawArgs, "filter", ec.unmarshalONodeFilter2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeFilterᚄ) arg0, err := graphql.ProcessArgField(ctx, rawArgs, "filter", ec.unmarshalONodeFilter2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeFilterᚄ)
@@ -7125,6 +7149,50 @@ func (ec *executionContext) fieldContext_JobsStatistics_name(_ context.Context,
return fc, nil return fc, nil
} }
func (ec *executionContext) _JobsStatistics_totalUsers(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_JobsStatistics_totalUsers(ctx, field)
if err != nil {
return graphql.Null
}
ctx = graphql.WithFieldContext(ctx, fc)
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
ctx = rctx // use context from middleware stack in children
return obj.TotalUsers, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) fieldContext_JobsStatistics_totalUsers(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: "JobsStatistics",
Field: field,
IsMethod: false,
IsResolver: false,
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
return nil, errors.New("field of type Int does not have child fields")
},
}
return fc, nil
}
func (ec *executionContext) _JobsStatistics_totalJobs(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) { func (ec *executionContext) _JobsStatistics_totalJobs(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_JobsStatistics_totalJobs(ctx, field) fc, err := ec.fieldContext_JobsStatistics_totalJobs(ctx, field)
if err != nil { if err != nil {
@@ -9788,6 +9856,50 @@ func (ec *executionContext) fieldContext_Node_subCluster(_ context.Context, fiel
return fc, nil return fc, nil
} }
func (ec *executionContext) _Node_runningJobs(ctx context.Context, field graphql.CollectedField, obj *schema.Node) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_Node_runningJobs(ctx, field)
if err != nil {
return graphql.Null
}
ctx = graphql.WithFieldContext(ctx, fc)
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Node().RunningJobs(rctx, obj)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) fieldContext_Node_runningJobs(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: "Node",
Field: field,
IsMethod: true,
IsResolver: true,
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
return nil, errors.New("field of type Int does not have child fields")
},
}
return fc, nil
}
func (ec *executionContext) _Node_nodeState(ctx context.Context, field graphql.CollectedField, obj *schema.Node) (ret graphql.Marshaler) { func (ec *executionContext) _Node_nodeState(ctx context.Context, field graphql.CollectedField, obj *schema.Node) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_Node_nodeState(ctx, field) fc, err := ec.fieldContext_Node_nodeState(ctx, field)
if err != nil { if err != nil {
@@ -9832,8 +9944,8 @@ func (ec *executionContext) fieldContext_Node_nodeState(_ context.Context, field
return fc, nil return fc, nil
} }
func (ec *executionContext) _Node_HealthState(ctx context.Context, field graphql.CollectedField, obj *schema.Node) (ret graphql.Marshaler) { func (ec *executionContext) _Node_healthState(ctx context.Context, field graphql.CollectedField, obj *schema.Node) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_Node_HealthState(ctx, field) fc, err := ec.fieldContext_Node_healthState(ctx, field)
if err != nil { if err != nil {
return graphql.Null return graphql.Null
} }
@@ -9863,7 +9975,7 @@ func (ec *executionContext) _Node_HealthState(ctx context.Context, field graphql
return ec.marshalNMonitoringState2githubᚗcomᚋClusterCockpitᚋccᚑlibᚋschemaᚐNodeState(ctx, field.Selections, res) return ec.marshalNMonitoringState2githubᚗcomᚋClusterCockpitᚋccᚑlibᚋschemaᚐNodeState(ctx, field.Selections, res)
} }
func (ec *executionContext) fieldContext_Node_HealthState(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { func (ec *executionContext) fieldContext_Node_healthState(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{ fc = &graphql.FieldContext{
Object: "Node", Object: "Node",
Field: field, Field: field,
@@ -10104,10 +10216,12 @@ func (ec *executionContext) fieldContext_NodeStateResultList_items(_ context.Con
return ec.fieldContext_Node_cluster(ctx, field) return ec.fieldContext_Node_cluster(ctx, field)
case "subCluster": case "subCluster":
return ec.fieldContext_Node_subCluster(ctx, field) return ec.fieldContext_Node_subCluster(ctx, field)
case "runningJobs":
return ec.fieldContext_Node_runningJobs(ctx, field)
case "nodeState": case "nodeState":
return ec.fieldContext_Node_nodeState(ctx, field) return ec.fieldContext_Node_nodeState(ctx, field)
case "HealthState": case "healthState":
return ec.fieldContext_Node_HealthState(ctx, field) return ec.fieldContext_Node_healthState(ctx, field)
case "metaData": case "metaData":
return ec.fieldContext_Node_metaData(ctx, field) return ec.fieldContext_Node_metaData(ctx, field)
} }
@@ -10158,8 +10272,8 @@ func (ec *executionContext) fieldContext_NodeStateResultList_count(_ context.Con
return fc, nil return fc, nil
} }
func (ec *executionContext) _NodeStats_state(ctx context.Context, field graphql.CollectedField, obj *model.NodeStats) (ret graphql.Marshaler) { func (ec *executionContext) _NodeStates_state(ctx context.Context, field graphql.CollectedField, obj *model.NodeStates) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_NodeStats_state(ctx, field) fc, err := ec.fieldContext_NodeStates_state(ctx, field)
if err != nil { if err != nil {
return graphql.Null return graphql.Null
} }
@@ -10189,9 +10303,9 @@ func (ec *executionContext) _NodeStats_state(ctx context.Context, field graphql.
return ec.marshalNString2string(ctx, field.Selections, res) return ec.marshalNString2string(ctx, field.Selections, res)
} }
func (ec *executionContext) fieldContext_NodeStats_state(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { func (ec *executionContext) fieldContext_NodeStates_state(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{ fc = &graphql.FieldContext{
Object: "NodeStats", Object: "NodeStates",
Field: field, Field: field,
IsMethod: false, IsMethod: false,
IsResolver: false, IsResolver: false,
@@ -10202,8 +10316,8 @@ func (ec *executionContext) fieldContext_NodeStats_state(_ context.Context, fiel
return fc, nil return fc, nil
} }
func (ec *executionContext) _NodeStats_count(ctx context.Context, field graphql.CollectedField, obj *model.NodeStats) (ret graphql.Marshaler) { func (ec *executionContext) _NodeStates_count(ctx context.Context, field graphql.CollectedField, obj *model.NodeStates) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_NodeStats_count(ctx, field) fc, err := ec.fieldContext_NodeStates_count(ctx, field)
if err != nil { if err != nil {
return graphql.Null return graphql.Null
} }
@@ -10233,9 +10347,9 @@ func (ec *executionContext) _NodeStats_count(ctx context.Context, field graphql.
return ec.marshalNInt2int(ctx, field.Selections, res) return ec.marshalNInt2int(ctx, field.Selections, res)
} }
func (ec *executionContext) fieldContext_NodeStats_count(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { func (ec *executionContext) fieldContext_NodeStates_count(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{ fc = &graphql.FieldContext{
Object: "NodeStats", Object: "NodeStates",
Field: field, Field: field,
IsMethod: false, IsMethod: false,
IsResolver: false, IsResolver: false,
@@ -10830,10 +10944,12 @@ func (ec *executionContext) fieldContext_Query_node(ctx context.Context, field g
return ec.fieldContext_Node_cluster(ctx, field) return ec.fieldContext_Node_cluster(ctx, field)
case "subCluster": case "subCluster":
return ec.fieldContext_Node_subCluster(ctx, field) return ec.fieldContext_Node_subCluster(ctx, field)
case "runningJobs":
return ec.fieldContext_Node_runningJobs(ctx, field)
case "nodeState": case "nodeState":
return ec.fieldContext_Node_nodeState(ctx, field) return ec.fieldContext_Node_nodeState(ctx, field)
case "HealthState": case "healthState":
return ec.fieldContext_Node_HealthState(ctx, field) return ec.fieldContext_Node_healthState(ctx, field)
case "metaData": case "metaData":
return ec.fieldContext_Node_metaData(ctx, field) return ec.fieldContext_Node_metaData(ctx, field)
} }
@@ -10915,8 +11031,8 @@ func (ec *executionContext) fieldContext_Query_nodes(ctx context.Context, field
return fc, nil return fc, nil
} }
func (ec *executionContext) _Query_nodeStats(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { func (ec *executionContext) _Query_nodeStates(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_Query_nodeStats(ctx, field) fc, err := ec.fieldContext_Query_nodeStates(ctx, field)
if err != nil { if err != nil {
return graphql.Null return graphql.Null
} }
@@ -10929,7 +11045,7 @@ func (ec *executionContext) _Query_nodeStats(ctx context.Context, field graphql.
}() }()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
ctx = rctx // use context from middleware stack in children ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().NodeStats(rctx, fc.Args["filter"].([]*model.NodeFilter)) return ec.resolvers.Query().NodeStates(rctx, fc.Args["filter"].([]*model.NodeFilter))
}) })
if err != nil { if err != nil {
ec.Error(ctx, err) ec.Error(ctx, err)
@@ -10941,12 +11057,12 @@ func (ec *executionContext) _Query_nodeStats(ctx context.Context, field graphql.
} }
return graphql.Null return graphql.Null
} }
res := resTmp.([]*model.NodeStats) res := resTmp.([]*model.NodeStates)
fc.Result = res fc.Result = res
return ec.marshalNNodeStats2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStatsᚄ(ctx, field.Selections, res) return ec.marshalNNodeStates2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStatesᚄ(ctx, field.Selections, res)
} }
func (ec *executionContext) fieldContext_Query_nodeStats(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { func (ec *executionContext) fieldContext_Query_nodeStates(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{ fc = &graphql.FieldContext{
Object: "Query", Object: "Query",
Field: field, Field: field,
@@ -10955,11 +11071,11 @@ func (ec *executionContext) fieldContext_Query_nodeStats(ctx context.Context, fi
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
switch field.Name { switch field.Name {
case "state": case "state":
return ec.fieldContext_NodeStats_state(ctx, field) return ec.fieldContext_NodeStates_state(ctx, field)
case "count": case "count":
return ec.fieldContext_NodeStats_count(ctx, field) return ec.fieldContext_NodeStates_count(ctx, field)
} }
return nil, fmt.Errorf("no field named %q was found under type NodeStats", field.Name) return nil, fmt.Errorf("no field named %q was found under type NodeStates", field.Name)
}, },
} }
defer func() { defer func() {
@@ -10969,7 +11085,7 @@ func (ec *executionContext) fieldContext_Query_nodeStats(ctx context.Context, fi
} }
}() }()
ctx = graphql.WithFieldContext(ctx, fc) ctx = graphql.WithFieldContext(ctx, fc)
if fc.Args, err = ec.field_Query_nodeStats_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { if fc.Args, err = ec.field_Query_nodeStates_args(ctx, field.ArgumentMap(ec.Variables)); err != nil {
ec.Error(ctx, err) ec.Error(ctx, err)
return fc, err return fc, err
} }
@@ -11379,6 +11495,8 @@ func (ec *executionContext) fieldContext_Query_jobsStatistics(ctx context.Contex
return ec.fieldContext_JobsStatistics_id(ctx, field) return ec.fieldContext_JobsStatistics_id(ctx, field)
case "name": case "name":
return ec.fieldContext_JobsStatistics_name(ctx, field) return ec.fieldContext_JobsStatistics_name(ctx, field)
case "totalUsers":
return ec.fieldContext_JobsStatistics_totalUsers(ctx, field)
case "totalJobs": case "totalJobs":
return ec.fieldContext_JobsStatistics_totalJobs(ctx, field) return ec.fieldContext_JobsStatistics_totalJobs(ctx, field)
case "runningJobs": case "runningJobs":
@@ -16549,7 +16667,7 @@ func (ec *executionContext) unmarshalInputNodeFilter(ctx context.Context, obj an
asMap[k] = v asMap[k] = v
} }
fieldsInOrder := [...]string{"hostname", "cluster", "nodeState", "healthState"} fieldsInOrder := [...]string{"hostname", "cluster", "subcluster", "nodeState", "healthState"}
for _, k := range fieldsInOrder { for _, k := range fieldsInOrder {
v, ok := asMap[k] v, ok := asMap[k]
if !ok { if !ok {
@@ -16570,6 +16688,13 @@ func (ec *executionContext) unmarshalInputNodeFilter(ctx context.Context, obj an
return it, err return it, err
} }
it.Cluster = data it.Cluster = data
case "subcluster":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("subcluster"))
data, err := ec.unmarshalOStringInput2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐStringInput(ctx, v)
if err != nil {
return it, err
}
it.Subcluster = data
case "nodeState": case "nodeState":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nodeState")) ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nodeState"))
data, err := ec.unmarshalONodeState2ᚖstring(ctx, v) data, err := ec.unmarshalONodeState2ᚖstring(ctx, v)
@@ -17976,6 +18101,11 @@ func (ec *executionContext) _JobsStatistics(ctx context.Context, sel ast.Selecti
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
out.Invalids++ out.Invalids++
} }
case "totalUsers":
out.Values[i] = ec._JobsStatistics_totalUsers(ctx, field, obj)
if out.Values[i] == graphql.Null {
out.Invalids++
}
case "totalJobs": case "totalJobs":
out.Values[i] = ec._JobsStatistics_totalJobs(ctx, field, obj) out.Values[i] = ec._JobsStatistics_totalJobs(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
@@ -18625,6 +18755,42 @@ func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
atomic.AddUint32(&out.Invalids, 1) atomic.AddUint32(&out.Invalids, 1)
} }
case "runningJobs":
field := field
innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Node_runningJobs(ctx, field, obj)
if res == graphql.Null {
atomic.AddUint32(&fs.Invalids, 1)
}
return res
}
if field.Deferrable != nil {
dfs, ok := deferred[field.Deferrable.Label]
di := 0
if ok {
dfs.AddField(field)
di = len(dfs.Values) - 1
} else {
dfs = graphql.NewFieldSet([]graphql.CollectedField{field})
deferred[field.Deferrable.Label] = dfs
}
dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler {
return innerFunc(ctx, dfs)
})
// don't run the out.Concurrently() call below
out.Values[i] = graphql.Null
continue
}
out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) })
case "nodeState": case "nodeState":
field := field field := field
@@ -18661,7 +18827,7 @@ func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj
} }
out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) })
case "HealthState": case "healthState":
field := field field := field
innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) {
@@ -18670,7 +18836,7 @@ func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj
ec.Error(ctx, ec.Recover(ctx, r)) ec.Error(ctx, ec.Recover(ctx, r))
} }
}() }()
res = ec._Node_HealthState(ctx, field, obj) res = ec._Node_healthState(ctx, field, obj)
if res == graphql.Null { if res == graphql.Null {
atomic.AddUint32(&fs.Invalids, 1) atomic.AddUint32(&fs.Invalids, 1)
} }
@@ -18843,24 +19009,24 @@ func (ec *executionContext) _NodeStateResultList(ctx context.Context, sel ast.Se
return out return out
} }
var nodeStatsImplementors = []string{"NodeStats"} var nodeStatesImplementors = []string{"NodeStates"}
func (ec *executionContext) _NodeStats(ctx context.Context, sel ast.SelectionSet, obj *model.NodeStats) graphql.Marshaler { func (ec *executionContext) _NodeStates(ctx context.Context, sel ast.SelectionSet, obj *model.NodeStates) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, nodeStatsImplementors) fields := graphql.CollectFields(ec.OperationContext, sel, nodeStatesImplementors)
out := graphql.NewFieldSet(fields) out := graphql.NewFieldSet(fields)
deferred := make(map[string]*graphql.FieldSet) deferred := make(map[string]*graphql.FieldSet)
for i, field := range fields { for i, field := range fields {
switch field.Name { switch field.Name {
case "__typename": case "__typename":
out.Values[i] = graphql.MarshalString("NodeStats") out.Values[i] = graphql.MarshalString("NodeStates")
case "state": case "state":
out.Values[i] = ec._NodeStats_state(ctx, field, obj) out.Values[i] = ec._NodeStates_state(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
out.Invalids++ out.Invalids++
} }
case "count": case "count":
out.Values[i] = ec._NodeStats_count(ctx, field, obj) out.Values[i] = ec._NodeStates_count(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
out.Invalids++ out.Invalids++
} }
@@ -19103,7 +19269,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
} }
out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) })
case "nodeStats": case "nodeStates":
field := field field := field
innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) {
@@ -19112,7 +19278,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
ec.Error(ctx, ec.Recover(ctx, r)) ec.Error(ctx, ec.Recover(ctx, r))
} }
}() }()
res = ec._Query_nodeStats(ctx, field) res = ec._Query_nodeStates(ctx, field)
if res == graphql.Null { if res == graphql.Null {
atomic.AddUint32(&fs.Invalids, 1) atomic.AddUint32(&fs.Invalids, 1)
} }
@@ -21756,7 +21922,7 @@ func (ec *executionContext) marshalNNodeStateResultList2ᚖgithubᚗcomᚋCluste
return ec._NodeStateResultList(ctx, sel, v) return ec._NodeStateResultList(ctx, sel, v)
} }
func (ec *executionContext) marshalNNodeStats2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStatsᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.NodeStats) graphql.Marshaler { func (ec *executionContext) marshalNNodeStates2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStatesᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.NodeStates) graphql.Marshaler {
ret := make(graphql.Array, len(v)) ret := make(graphql.Array, len(v))
var wg sync.WaitGroup var wg sync.WaitGroup
isLen1 := len(v) == 1 isLen1 := len(v) == 1
@@ -21780,7 +21946,7 @@ func (ec *executionContext) marshalNNodeStats2ᚕᚖgithubᚗcomᚋClusterCockpi
if !isLen1 { if !isLen1 {
defer wg.Done() defer wg.Done()
} }
ret[i] = ec.marshalNNodeStats2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStats(ctx, sel, v[i]) ret[i] = ec.marshalNNodeStates2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStates(ctx, sel, v[i])
} }
if isLen1 { if isLen1 {
f(i) f(i)
@@ -21800,14 +21966,14 @@ func (ec *executionContext) marshalNNodeStats2ᚕᚖgithubᚗcomᚋClusterCockpi
return ret return ret
} }
func (ec *executionContext) marshalNNodeStats2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStats(ctx context.Context, sel ast.SelectionSet, v *model.NodeStats) graphql.Marshaler { func (ec *executionContext) marshalNNodeStates2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodeStates(ctx context.Context, sel ast.SelectionSet, v *model.NodeStates) graphql.Marshaler {
if v == nil { if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "the requested element is null which the schema does not allow") ec.Errorf(ctx, "the requested element is null which the schema does not allow")
} }
return graphql.Null return graphql.Null
} }
return ec._NodeStats(ctx, sel, v) return ec._NodeStates(ctx, sel, v)
} }
func (ec *executionContext) marshalNNodesResultList2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodesResultList(ctx context.Context, sel ast.SelectionSet, v model.NodesResultList) graphql.Marshaler { func (ec *executionContext) marshalNNodesResultList2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐNodesResultList(ctx context.Context, sel ast.SelectionSet, v model.NodesResultList) graphql.Marshaler {

View File

@@ -114,6 +114,7 @@ type JobStats struct {
type JobsStatistics struct { type JobsStatistics struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
TotalUsers int `json:"totalUsers"`
TotalJobs int `json:"totalJobs"` TotalJobs int `json:"totalJobs"`
RunningJobs int `json:"runningJobs"` RunningJobs int `json:"runningJobs"`
ShortJobs int `json:"shortJobs"` ShortJobs int `json:"shortJobs"`
@@ -172,6 +173,7 @@ type NamedStatsWithScope struct {
type NodeFilter struct { type NodeFilter struct {
Hostname *StringInput `json:"hostname,omitempty"` Hostname *StringInput `json:"hostname,omitempty"`
Cluster *StringInput `json:"cluster,omitempty"` Cluster *StringInput `json:"cluster,omitempty"`
Subcluster *StringInput `json:"subcluster,omitempty"`
NodeState *string `json:"nodeState,omitempty"` NodeState *string `json:"nodeState,omitempty"`
HealthState *schema.NodeState `json:"healthState,omitempty"` HealthState *schema.NodeState `json:"healthState,omitempty"`
} }
@@ -187,7 +189,7 @@ type NodeStateResultList struct {
Count *int `json:"count,omitempty"` Count *int `json:"count,omitempty"`
} }
type NodeStats struct { type NodeStates struct {
State string `json:"state"` State string `json:"state"`
Count int `json:"count"` Count int `json:"count"`
} }
@@ -248,20 +250,22 @@ type User struct {
type Aggregate string type Aggregate string
const ( const (
AggregateUser Aggregate = "USER" AggregateUser Aggregate = "USER"
AggregateProject Aggregate = "PROJECT" AggregateProject Aggregate = "PROJECT"
AggregateCluster Aggregate = "CLUSTER" AggregateCluster Aggregate = "CLUSTER"
AggregateSubcluster Aggregate = "SUBCLUSTER"
) )
var AllAggregate = []Aggregate{ var AllAggregate = []Aggregate{
AggregateUser, AggregateUser,
AggregateProject, AggregateProject,
AggregateCluster, AggregateCluster,
AggregateSubcluster,
} }
func (e Aggregate) IsValid() bool { func (e Aggregate) IsValid() bool {
switch e { switch e {
case AggregateUser, AggregateProject, AggregateCluster: case AggregateUser, AggregateProject, AggregateCluster, AggregateSubcluster:
return true return true
} }
return false return false
@@ -307,6 +311,7 @@ type SortByAggregate string
const ( const (
SortByAggregateTotalwalltime SortByAggregate = "TOTALWALLTIME" SortByAggregateTotalwalltime SortByAggregate = "TOTALWALLTIME"
SortByAggregateTotaljobs SortByAggregate = "TOTALJOBS" SortByAggregateTotaljobs SortByAggregate = "TOTALJOBS"
SortByAggregateTotalusers SortByAggregate = "TOTALUSERS"
SortByAggregateTotalnodes SortByAggregate = "TOTALNODES" SortByAggregateTotalnodes SortByAggregate = "TOTALNODES"
SortByAggregateTotalnodehours SortByAggregate = "TOTALNODEHOURS" SortByAggregateTotalnodehours SortByAggregate = "TOTALNODEHOURS"
SortByAggregateTotalcores SortByAggregate = "TOTALCORES" SortByAggregateTotalcores SortByAggregate = "TOTALCORES"
@@ -318,6 +323,7 @@ const (
var AllSortByAggregate = []SortByAggregate{ var AllSortByAggregate = []SortByAggregate{
SortByAggregateTotalwalltime, SortByAggregateTotalwalltime,
SortByAggregateTotaljobs, SortByAggregateTotaljobs,
SortByAggregateTotalusers,
SortByAggregateTotalnodes, SortByAggregateTotalnodes,
SortByAggregateTotalnodehours, SortByAggregateTotalnodehours,
SortByAggregateTotalcores, SortByAggregateTotalcores,
@@ -328,7 +334,7 @@ var AllSortByAggregate = []SortByAggregate{
func (e SortByAggregate) IsValid() bool { func (e SortByAggregate) IsValid() bool {
switch e { switch e {
case SortByAggregateTotalwalltime, SortByAggregateTotaljobs, SortByAggregateTotalnodes, SortByAggregateTotalnodehours, SortByAggregateTotalcores, SortByAggregateTotalcorehours, SortByAggregateTotalaccs, SortByAggregateTotalacchours: case SortByAggregateTotalwalltime, SortByAggregateTotaljobs, SortByAggregateTotalusers, SortByAggregateTotalnodes, SortByAggregateTotalnodehours, SortByAggregateTotalcores, SortByAggregateTotalcorehours, SortByAggregateTotalaccs, SortByAggregateTotalacchours:
return true return true
} }
return false return false

View File

@@ -305,14 +305,20 @@ func (r *mutationResolver) UpdateConfiguration(ctx context.Context, name string,
return nil, nil return nil, nil
} }
// NodeState is the resolver for the nodeState field. // RunningJobs is the resolver for the runningJobs field.
func (r *nodeResolver) NodeState(ctx context.Context, obj *schema.Node) (string, error) { func (r *nodeResolver) RunningJobs(ctx context.Context, obj *schema.Node) (int, error) {
panic(fmt.Errorf("not implemented: NodeState - nodeState")) panic(fmt.Errorf("not implemented: RunningJobs - runningJobs"))
} }
// HealthState is the resolver for the HealthState field. // NodeState is the resolver for the nodeState field.
func (r *nodeResolver) NodeState(ctx context.Context, obj *schema.Node) (string, error) {
return string(obj.NodeState), nil
}
// HealthState is the resolver for the healthState field.
func (r *nodeResolver) HealthState(ctx context.Context, obj *schema.Node) (schema.NodeState, error) { func (r *nodeResolver) HealthState(ctx context.Context, obj *schema.Node) (schema.NodeState, error) {
panic(fmt.Errorf("not implemented: HealthState - HealthState")) // FIXME: Why is Output of schema.NodeState Type?
panic(fmt.Errorf("not implemented: HealthState - healthState"))
} }
// MetaData is the resolver for the metaData field. // MetaData is the resolver for the metaData field.
@@ -378,9 +384,26 @@ func (r *queryResolver) Nodes(ctx context.Context, filter []*model.NodeFilter, o
return &model.NodeStateResultList{Items: nodes, Count: &count}, err return &model.NodeStateResultList{Items: nodes, Count: &count}, err
} }
// NodeStats is the resolver for the nodeStats field. // NodeStates is the resolver for the nodeStates field.
func (r *queryResolver) NodeStats(ctx context.Context, filter []*model.NodeFilter) ([]*model.NodeStats, error) { func (r *queryResolver) NodeStates(ctx context.Context, filter []*model.NodeFilter) ([]*model.NodeStates, error) {
panic(fmt.Errorf("not implemented: NodeStats - nodeStats")) repo := repository.GetNodeRepository()
stateCounts, serr := repo.CountNodeStates(ctx, filter)
if serr != nil {
cclog.Warnf("Error while counting nodeStates: %s", serr.Error())
return nil, serr
}
healthCounts, herr := repo.CountHealthStates(ctx, filter)
if herr != nil {
cclog.Warnf("Error while counting healthStates: %s", herr.Error())
return nil, herr
}
allCounts := make([]*model.NodeStates, 0)
allCounts = append(stateCounts, healthCounts...)
return allCounts, nil
} }
// Job is the resolver for the job field. // Job is the resolver for the job field.
@@ -558,7 +581,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
defaultDurationBins := "1h" defaultDurationBins := "1h"
defaultMetricBins := 10 defaultMetricBins := 10
if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") || if requireField(ctx, "totalJobs") || requireField(ctx, "totalUsers") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") { requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
if groupBy == nil { if groupBy == nil {
stats, err = r.Repo.JobsStats(ctx, filter) stats, err = r.Repo.JobsStats(ctx, filter)

View File

@@ -40,7 +40,7 @@ func InitDB() error {
} }
tags := make(map[string]int64) tags := make(map[string]int64)
// Not using log.Print because we want the line to end with `\r` and // Not using cclog.Print because we want the line to end with `\r` and
// this function is only ever called when a special command line flag // this function is only ever called when a special command line flag
// is passed anyways. // is passed anyways.
fmt.Printf("%d jobs inserted...\r", 0) fmt.Printf("%d jobs inserted...\r", 0)

View File

@@ -337,10 +337,10 @@ func (r *JobRepository) FindColumnValue(user *schema.User, searchterm string, ta
// theSql, args, theErr := theQuery.ToSql() // theSql, args, theErr := theQuery.ToSql()
// if theErr != nil { // if theErr != nil {
// log.Warn("Error while converting query to sql") // cclog.Warn("Error while converting query to sql")
// return "", err // return "", err
// } // }
// log.Debugf("SQL query (FindColumnValue): `%s`, args: %#v", theSql, args) // cclog.Debugf("SQL query (FindColumnValue): `%s`, args: %#v", theSql, args)
err := theQuery.RunWith(r.stmtCache).QueryRow().Scan(&result) err := theQuery.RunWith(r.stmtCache).QueryRow().Scan(&result)

View File

@@ -4,12 +4,13 @@ CREATE TABLE "node" (
hostname VARCHAR(255) NOT NULL, hostname VARCHAR(255) NOT NULL,
cluster VARCHAR(255) NOT NULL, cluster VARCHAR(255) NOT NULL,
subcluster VARCHAR(255) NOT NULL, subcluster VARCHAR(255) NOT NULL,
cpus_allocated INTEGER NOT NULL, jobs_running INTEGER DEFAULT 0 NOT NULL,
cpus_total INTEGER NOT NULL, cpus_allocated INTEGER DEFAULT 0 NOT NULL,
memory_allocated INTEGER NOT NULL, cpus_total INTEGER DEFAULT 0 NOT NULL,
memory_total INTEGER NOT NULL, memory_allocated INTEGER DEFAULT 0 NOT NULL,
gpus_allocated INTEGER NOT NULL, memory_total INTEGER DEFAULT 0 NOT NULL,
gpus_total INTEGER NOT NULL, gpus_allocated INTEGER DEFAULT 0 NOT NULL,
gpus_total INTEGER DEFAULT 0 NOT NULL,
node_state VARCHAR(255) NOT NULL node_state VARCHAR(255) NOT NULL
CHECK (node_state IN ( CHECK (node_state IN (
'allocated', 'reserved', 'idle', 'mixed', 'allocated', 'reserved', 'idle', 'mixed',

View File

@@ -49,6 +49,12 @@ func GetNodeRepository() *NodeRepository {
return nodeRepoInstance return nodeRepoInstance
} }
var nodeColumns []string = []string{
// "node.id,"
"node.hostname", "node.cluster", "node.subcluster",
"node.node_state", "node.health_state", // "node.meta_data",
}
func (r *NodeRepository) FetchMetadata(node *schema.Node) (map[string]string, error) { func (r *NodeRepository) FetchMetadata(node *schema.Node) (map[string]string, error) {
start := time.Now() start := time.Now()
cachekey := fmt.Sprintf("metadata:%d", node.ID) cachekey := fmt.Sprintf("metadata:%d", node.ID)
@@ -218,9 +224,9 @@ func (r *NodeRepository) DeleteNode(id int64) error {
func (r *NodeRepository) QueryNodes( func (r *NodeRepository) QueryNodes(
ctx context.Context, ctx context.Context,
filters []*model.NodeFilter, filters []*model.NodeFilter,
order *model.OrderByInput, order *model.OrderByInput, // Currently unused!
) ([]*schema.Node, error) { ) ([]*schema.Node, error) {
query, qerr := SecurityCheck(ctx, sq.Select(jobColumns...).From("node")) query, qerr := AccessCheck(ctx, sq.Select(nodeColumns...).From("node"))
if qerr != nil { if qerr != nil {
return nil, qerr return nil, qerr
} }
@@ -232,6 +238,9 @@ func (r *NodeRepository) QueryNodes(
if f.Cluster != nil { if f.Cluster != nil {
query = buildStringCondition("node.cluster", f.Cluster, query) query = buildStringCondition("node.cluster", f.Cluster, query)
} }
if f.Subcluster != nil {
query = buildStringCondition("node.subcluster", f.Subcluster, query)
}
if f.NodeState != nil { if f.NodeState != nil {
query = query.Where("node.node_state = ?", f.NodeState) query = query.Where("node.node_state = ?", f.NodeState)
} }
@@ -287,3 +296,123 @@ func (r *NodeRepository) ListNodes(cluster string) ([]*schema.Node, error) {
return nodeList, nil return nodeList, nil
} }
func (r *NodeRepository) CountNodeStates(ctx context.Context, filters []*model.NodeFilter) ([]*model.NodeStates, error) {
query, qerr := AccessCheck(ctx, sq.Select("node_state AS state", "count(*) AS count").From("node"))
if qerr != nil {
return nil, qerr
}
for _, f := range filters {
if f.Hostname != nil {
query = buildStringCondition("node.hostname", f.Hostname, query)
}
if f.Cluster != nil {
query = buildStringCondition("node.cluster", f.Cluster, query)
}
if f.Subcluster != nil {
query = buildStringCondition("node.subcluster", f.Subcluster, query)
}
if f.NodeState != nil {
query = query.Where("node.node_state = ?", f.NodeState)
}
if f.HealthState != nil {
query = query.Where("node.health_state = ?", f.HealthState)
}
}
// Add Group and Order
query = query.GroupBy("state").OrderBy("count DESC")
rows, err := query.RunWith(r.stmtCache).Query()
if err != nil {
queryString, queryVars, _ := query.ToSql()
cclog.Errorf("Error while running query '%s' %v: %v", queryString, queryVars, err)
return nil, err
}
nodes := make([]*model.NodeStates, 0)
for rows.Next() {
node := model.NodeStates{}
if err := rows.Scan(&node.State, &node.Count); err != nil {
rows.Close()
cclog.Warn("Error while scanning rows (NodeStates)")
return nil, err
}
nodes = append(nodes, &node)
}
return nodes, nil
}
func (r *NodeRepository) CountHealthStates(ctx context.Context, filters []*model.NodeFilter) ([]*model.NodeStates, error) {
query, qerr := AccessCheck(ctx, sq.Select("health_state AS state", "count(*) AS count").From("node"))
if qerr != nil {
return nil, qerr
}
for _, f := range filters {
if f.Hostname != nil {
query = buildStringCondition("node.hostname", f.Hostname, query)
}
if f.Cluster != nil {
query = buildStringCondition("node.cluster", f.Cluster, query)
}
if f.Subcluster != nil {
query = buildStringCondition("node.subcluster", f.Subcluster, query)
}
if f.NodeState != nil {
query = query.Where("node.node_state = ?", f.NodeState)
}
if f.HealthState != nil {
query = query.Where("node.health_state = ?", f.HealthState)
}
}
// Add Group and Order
query = query.GroupBy("state").OrderBy("count DESC")
rows, err := query.RunWith(r.stmtCache).Query()
if err != nil {
queryString, queryVars, _ := query.ToSql()
cclog.Errorf("Error while running query '%s' %v: %v", queryString, queryVars, err)
return nil, err
}
nodes := make([]*model.NodeStates, 0)
for rows.Next() {
node := model.NodeStates{}
if err := rows.Scan(&node.State, &node.Count); err != nil {
rows.Close()
cclog.Warn("Error while scanning rows (NodeStates)")
return nil, err
}
nodes = append(nodes, &node)
}
return nodes, nil
}
func AccessCheck(ctx context.Context, query sq.SelectBuilder) (sq.SelectBuilder, error) {
user := GetUserFromContext(ctx)
return AccessCheckWithUser(user, query)
}
func AccessCheckWithUser(user *schema.User, query sq.SelectBuilder) (sq.SelectBuilder, error) {
if user == nil {
var qnil sq.SelectBuilder
return qnil, fmt.Errorf("user context is nil")
}
switch {
// case len(user.Roles) == 1 && user.HasRole(schema.RoleApi): // API-User : Access NodeInfos
// return query, nil
case user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport}): // Admin & Support : Access NodeInfos
return query, nil
default: // No known Role: No Access, return error
var qnil sq.SelectBuilder
return qnil, fmt.Errorf("user has no or unknown roles")
}
}

View File

@@ -28,6 +28,7 @@ var groupBy2column = map[model.Aggregate]string{
var sortBy2column = map[model.SortByAggregate]string{ var sortBy2column = map[model.SortByAggregate]string{
model.SortByAggregateTotaljobs: "totalJobs", model.SortByAggregateTotaljobs: "totalJobs",
model.SortByAggregateTotalusers: "totalUsers",
model.SortByAggregateTotalwalltime: "totalWalltime", model.SortByAggregateTotalwalltime: "totalWalltime",
model.SortByAggregateTotalnodes: "totalNodes", model.SortByAggregateTotalnodes: "totalNodes",
model.SortByAggregateTotalnodehours: "totalNodeHours", model.SortByAggregateTotalnodehours: "totalNodeHours",
@@ -76,8 +77,12 @@ func (r *JobRepository) buildStatsQuery(
// fmt.Sprintf(`CAST(ROUND((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / 3600) as %s) as value`, time.Now().Unix(), castType) // fmt.Sprintf(`CAST(ROUND((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / 3600) as %s) as value`, time.Now().Unix(), castType)
if col != "" { if col != "" {
// Scan columns: id, totalJobs, name, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours // Scan columns: id, name, totalJobs, totalUsers, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
query = sq.Select(col, "COUNT(job.id) as totalJobs", "name", query = sq.Select(
col,
"name",
"COUNT(job.id) as totalJobs",
"COUNT(DISTINCT job.hpc_user) AS totalUsers",
fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s) as totalWalltime`, time.Now().Unix(), castType), fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s) as totalWalltime`, time.Now().Unix(), castType),
fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s) as totalNodes`, castType), fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s) as totalNodes`, castType),
fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s) as totalNodeHours`, time.Now().Unix(), castType), fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s) as totalNodeHours`, time.Now().Unix(), castType),
@@ -87,8 +92,10 @@ func (r *JobRepository) buildStatsQuery(
fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_acc) / 3600) as %s) as totalAccHours`, time.Now().Unix(), castType), fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_acc) / 3600) as %s) as totalAccHours`, time.Now().Unix(), castType),
).From("job").LeftJoin("hpc_user ON hpc_user.username = job.hpc_user").GroupBy(col) ).From("job").LeftJoin("hpc_user ON hpc_user.username = job.hpc_user").GroupBy(col)
} else { } else {
// Scan columns: totalJobs, name, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours // Scan columns: totalJobs, totalUsers, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
query = sq.Select("COUNT(job.id)", query = sq.Select(
"COUNT(job.id) as totalJobs",
"COUNT(DISTINCT job.hpc_user) AS totalUsers",
fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s)`, time.Now().Unix(), castType), fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s)`, time.Now().Unix(), castType),
fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s)`, castType), fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s)`, castType),
fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s)`, time.Now().Unix(), castType), fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s)`, time.Now().Unix(), castType),
@@ -167,14 +174,14 @@ func (r *JobRepository) JobsStatsGrouped(
for rows.Next() { for rows.Next() {
var id sql.NullString var id sql.NullString
var name sql.NullString var name sql.NullString
var jobs, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64 var jobs, users, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64
if err := rows.Scan(&id, &jobs, &name, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil { if err := rows.Scan(&id, &name, &jobs, &users, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil {
cclog.Warn("Error while scanning rows") cclog.Warn("Error while scanning rows")
return nil, err return nil, err
} }
if id.Valid { if id.Valid {
var totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours int var totalJobs, totalUsers, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours int
var personName string var personName string
if name.Valid { if name.Valid {
@@ -185,6 +192,10 @@ func (r *JobRepository) JobsStatsGrouped(
totalJobs = int(jobs.Int64) totalJobs = int(jobs.Int64)
} }
if users.Valid {
totalUsers = int(users.Int64)
}
if walltime.Valid { if walltime.Valid {
totalWalltime = int(walltime.Int64) totalWalltime = int(walltime.Int64)
} }
@@ -228,8 +239,9 @@ func (r *JobRepository) JobsStatsGrouped(
stats = append(stats, stats = append(stats,
&model.JobsStatistics{ &model.JobsStatistics{
ID: id.String, ID: id.String,
TotalJobs: int(jobs.Int64), TotalJobs: totalJobs,
TotalWalltime: int(walltime.Int64), TotalUsers: totalUsers,
TotalWalltime: totalWalltime,
TotalNodes: totalNodes, TotalNodes: totalNodes,
TotalNodeHours: totalNodeHours, TotalNodeHours: totalNodeHours,
TotalCores: totalCores, TotalCores: totalCores,
@@ -259,8 +271,8 @@ func (r *JobRepository) JobsStats(
row := query.RunWith(r.DB).QueryRow() row := query.RunWith(r.DB).QueryRow()
stats := make([]*model.JobsStatistics, 0, 1) stats := make([]*model.JobsStatistics, 0, 1)
var jobs, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64 var jobs, users, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64
if err := row.Scan(&jobs, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil { if err := row.Scan(&jobs, &users, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil {
cclog.Warn("Error while scanning rows") cclog.Warn("Error while scanning rows")
return nil, err return nil, err
} }
@@ -280,6 +292,7 @@ func (r *JobRepository) JobsStats(
stats = append(stats, stats = append(stats,
&model.JobsStatistics{ &model.JobsStatistics{
TotalJobs: int(jobs.Int64), TotalJobs: int(jobs.Int64),
TotalUsers: int(users.Int64),
TotalWalltime: int(walltime.Int64), TotalWalltime: int(walltime.Int64),
TotalNodeHours: totalNodeHours, TotalNodeHours: totalNodeHours,
TotalCoreHours: totalCoreHours, TotalCoreHours: totalCoreHours,

View File

@@ -459,7 +459,7 @@
</tr> </tr>
{#each $topQuery.data.topList as te, i} {#each $topQuery.data.topList as te, i}
<tr> <tr>
<td><Icon name="circle-fill" style="color: {colors[i]};" /></td> <td><Icon name="circle-fill" style="color: {colors['colorblind'][i]};" /></td>
{#if groupSelection.key == "user"} {#if groupSelection.key == "user"}
<th scope="col" id="topName-{te.id}" <th scope="col" id="topName-{te.id}"
><a href="/monitoring/user/{te.id}?cluster={clusterName}" ><a href="/monitoring/user/{te.id}?cluster={clusterName}"

View File

@@ -2,709 +2,62 @@
@component Main cluster status view component; renders current system-usage information @component Main cluster status view component; renders current system-usage information
Properties: Properties:
- `cluster String`: The cluster to show status information for - `presetCluster String`: The cluster to show status information for
--> -->
<script> <script>
import { getContext } from "svelte"; import {
getContext
} from "svelte"
import { import {
Row, Row,
Col, Col,
Spinner,
Card, Card,
CardHeader,
CardTitle,
CardBody, CardBody,
Table, TabContent,
Progress, TabPane
Icon,
Button,
Tooltip
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import {
queryStore, import StatusDash from "./status/StatusDash.svelte";
gql, import UsageDash from "./status/UsageDash.svelte";
getContextClient, import StatisticsDash from "./status/StatisticsDash.svelte";
mutationStore,
} from "@urql/svelte";
import {
init,
convert2uplot,
transformPerNodeDataForRoofline,
scramble,
scrambleNames,
} from "./generic/utils.js";
import { scaleNumbers } from "./generic/units.js";
import PlotGrid from "./generic/PlotGrid.svelte";
import Roofline from "./generic/plots/Roofline.svelte";
import Pie, { colors } from "./generic/plots/Pie.svelte";
import Histogram from "./generic/plots/Histogram.svelte";
import Refresher from "./generic/helper/Refresher.svelte";
import HistogramSelection from "./generic/select/HistogramSelection.svelte";
/* Svelte 5 Props */ /* Svelte 5 Props */
let { let {
cluster presetCluster
} = $props(); } = $props();
/* Const Init */ /*Const Init */
const { query: initq } = init(); const useCbColors = getContext("cc-config")?.plot_general_colorblindMode || false
const ccconfig = getContext("cc-config");
const client = getContextClient();
const paging = { itemsPerPage: 10, page: 1 }; // Top 10
const topOptions = [
{ key: "totalJobs", label: "Jobs" },
{ key: "totalNodes", label: "Nodes" },
{ key: "totalCores", label: "Cores" },
{ key: "totalAccs", label: "Accelerators" },
];
/* State Init */
let from = $state(new Date(Date.now() - 5 * 60 * 1000));
let to = $state(new Date(Date.now()));
let colWidth = $state(0);
let plotWidths = $state([]);
// Histrogram
let isHistogramSelectionOpen = $state(false);
let selectedHistograms = $state(cluster
? ccconfig[`user_view_histogramMetrics:${cluster}`] || ( ccconfig['user_view_histogramMetrics'] || [] )
: ccconfig['user_view_histogramMetrics'] || []);
// Bar Gauges
let allocatedNodes = $state({});
let flopRate = $state({});
let flopRateUnitPrefix = $state({});
let flopRateUnitBase = $state({});
let memBwRate = $state({});
let memBwRateUnitPrefix = $state({});
let memBwRateUnitBase = $state({});
// Pie Charts
let topProjectSelection = $state(
topOptions.find(
(option) =>
option.key ==
ccconfig[`status_view_selectedTopProjectCategory:${cluster}`],
) ||
topOptions.find(
(option) => option.key == ccconfig.status_view_selectedTopProjectCategory,
)
);
let topUserSelection = $state(
topOptions.find(
(option) =>
option.key ==
ccconfig[`status_view_selectedTopUserCategory:${cluster}`],
) ||
topOptions.find(
(option) => option.key == ccconfig.status_view_selectedTopUserCategory,
)
);
/* Derived */
// Note: nodeMetrics are requested on configured $timestep resolution
const mainQuery = $derived(queryStore({
client: client,
query: gql`
query (
$cluster: String!
$filter: [JobFilter!]!
$metrics: [String!]
$from: Time!
$to: Time!
$selectedHistograms: [String!]
) {
nodeMetrics(
cluster: $cluster
metrics: $metrics
from: $from
to: $to
) {
host
subCluster
metrics {
name
scope
metric {
timestep
unit {
base
prefix
}
series {
data
}
}
}
}
stats: jobsStatistics(filter: $filter, metrics: $selectedHistograms) {
histDuration {
count
value
}
histNumNodes {
count
value
}
histNumCores {
count
value
}
histNumAccs {
count
value
}
histMetrics {
metric
unit
data {
min
max
count
bin
}
}
}
allocatedNodes(cluster: $cluster) {
name
count
}
}
`,
variables: {
cluster: cluster,
metrics: ["flops_any", "mem_bw"], // Fixed names for roofline and status bars
from: from.toISOString(),
to: to.toISOString(),
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
selectedHistograms: selectedHistograms,
},
}));
const topUserQuery = $derived(queryStore({
client: client,
query: gql`
query (
$filter: [JobFilter!]!
$paging: PageRequest!
$sortBy: SortByAggregate!
) {
topUser: jobsStatistics(
filter: $filter
page: $paging
sortBy: $sortBy
groupBy: USER
) {
id
name
totalJobs
totalNodes
totalCores
totalAccs
}
}
`,
variables: {
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
paging,
sortBy: topUserSelection.key.toUpperCase(),
},
}));
const topProjectQuery = $derived(queryStore({
client: client,
query: gql`
query (
$filter: [JobFilter!]!
$paging: PageRequest!
$sortBy: SortByAggregate!
) {
topProjects: jobsStatistics(
filter: $filter
page: $paging
sortBy: $sortBy
groupBy: PROJECT
) {
id
totalJobs
totalNodes
totalCores
totalAccs
}
}
`,
variables: {
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
paging,
sortBy: topProjectSelection.key.toUpperCase(),
},
}));
/* Effects */
$effect(() => {
if ($initq.data && $mainQuery.data) {
let subClusters = $initq.data.clusters.find(
(c) => c.name == cluster,
).subClusters;
for (let subCluster of subClusters) {
allocatedNodes[subCluster.name] =
$mainQuery.data.allocatedNodes.find(
({ name }) => name == subCluster.name,
)?.count || 0;
flopRate[subCluster.name] =
Math.floor(
sumUp($mainQuery.data.nodeMetrics, subCluster.name, "flops_any") *
100,
) / 100;
flopRateUnitPrefix[subCluster.name] = subCluster.flopRateSimd.unit.prefix;
flopRateUnitBase[subCluster.name] = subCluster.flopRateSimd.unit.base;
memBwRate[subCluster.name] =
Math.floor(
sumUp($mainQuery.data.nodeMetrics, subCluster.name, "mem_bw") * 100,
) / 100;
memBwRateUnitPrefix[subCluster.name] =
subCluster.memoryBandwidth.unit.prefix;
memBwRateUnitBase[subCluster.name] = subCluster.memoryBandwidth.unit.base;
}
}
});
$effect(() => {
updateTopUserConfiguration(topUserSelection.key);
});
$effect(() => {
updateTopProjectConfiguration(topProjectSelection.key);
});
/* Const Functions */
const sumUp = (data, subcluster, metric) =>
data.reduce(
(sum, node) =>
node.subCluster == subcluster
? sum +
(node.metrics
.find((m) => m.name == metric)
?.metric.series.reduce(
(sum, series) => sum + series.data[series.data.length - 1],
0,
) || 0)
: sum,
0,
);
const updateConfigurationMutation = ({ name, value }) => {
return mutationStore({
client: client,
query: gql`
mutation ($name: String!, $value: String!) {
updateConfiguration(name: $name, value: $value)
}
`,
variables: { name, value },
});
};
/* Functions */
function updateTopUserConfiguration(select) {
if (ccconfig[`status_view_selectedTopUserCategory:${cluster}`] != select) {
updateConfigurationMutation({
name: `status_view_selectedTopUserCategory:${cluster}`,
value: JSON.stringify(select),
}).subscribe((res) => {
if (res.fetching === false && res.error) {
throw res.error;
}
});
}
}
function updateTopProjectConfiguration(select) {
if (
ccconfig[`status_view_selectedTopProjectCategory:${cluster}`] != select
) {
updateConfigurationMutation({
name: `status_view_selectedTopProjectCategory:${cluster}`,
value: JSON.stringify(select),
}).subscribe((res) => {
if (res.fetching === false && res.error) {
throw res.error;
}
});
}
}
</script> </script>
<!-- Loading indicator & Refresh --> <!-- Loading indicator & Refresh -->
<Row cols={{ lg: 3, md: 3, sm: 1 }}> <Row cols={1} class="mb-2">
<Col style="">
<h4 class="mb-0">Current utilization of cluster "{cluster}"</h4>
</Col>
<Col class="mt-2 mt-md-0 text-md-end">
<Button
outline
color="secondary"
onclick={() => (isHistogramSelectionOpen = true)}
>
<Icon name="bar-chart-line" /> Select Histograms
</Button>
</Col>
<Col class="mt-2 mt-md-0">
<Refresher
initially={120}
onRefresh={() => {
from = new Date(Date.now() - 5 * 60 * 1000);
to = new Date(Date.now());
}}
/>
</Col>
</Row>
<Row cols={1} class="text-center mt-3">
<Col> <Col>
{#if $initq.fetching || $mainQuery.fetching} <h3 class="mb-0">Current Status of Cluster "{presetCluster.charAt(0).toUpperCase() + presetCluster.slice(1)}"</h3>
<Spinner />
{:else if $initq.error}
<Card body color="danger">{$initq.error.message}</Card>
{:else}
<!-- ... -->
{/if}
</Col> </Col>
</Row> </Row>
{#if $mainQuery.error}
<Row cols={1}>
<Col>
<Card body color="danger">{$mainQuery.error.message}</Card>
</Col>
</Row>
{/if}
<hr /> <Card class="overflow-auto" style="height: auto;">
<TabContent>
<TabPane tabId="status-dash" tab="Status" active>
<CardBody>
<StatusDash {presetCluster} {useCbColors} useAltColors></StatusDash>
</CardBody>
</TabPane>
<!-- Gauges & Roofline per Subcluster--> <TabPane tabId="usage-dash" tab="Usage">
<CardBody>
{#if $initq.data && $mainQuery.data} <UsageDash {presetCluster} {useCbColors}></UsageDash>
{#each $initq.data.clusters.find((c) => c.name == cluster).subClusters as subCluster, i} </CardBody>
<Row cols={{ lg: 2, md: 1 , sm: 1}} class="mb-3 justify-content-center"> </TabPane>
<Col class="px-3">
<Card class="h-auto mt-1">
<CardHeader>
<CardTitle class="mb-0">SubCluster "{subCluster.name}"</CardTitle>
</CardHeader>
<CardBody>
<Table borderless>
<tr class="py-2">
<th scope="col">Allocated Nodes</th>
<td style="min-width: 100px;"
><div class="col">
<Progress
value={allocatedNodes[subCluster.name]}
max={subCluster.numberOfNodes}
/>
</div></td
>
<td
>{allocatedNodes[subCluster.name]} / {subCluster.numberOfNodes}
Nodes</td
>
</tr>
<tr class="py-2">
<th scope="col"
>Flop Rate (Any) <Icon
name="info-circle"
class="p-1"
style="cursor: help;"
title="Flops[Any] = (Flops[Double] x 2) + Flops[Single]"
/></th
>
<td style="min-width: 100px;"
><div class="col">
<Progress
value={flopRate[subCluster.name]}
max={subCluster.flopRateSimd.value *
subCluster.numberOfNodes}
/>
</div></td
>
<td>
{scaleNumbers(
flopRate[subCluster.name],
subCluster.flopRateSimd.value * subCluster.numberOfNodes,
flopRateUnitPrefix[subCluster.name],
)}{flopRateUnitBase[subCluster.name]} [Max]
</td>
</tr>
<tr class="py-2">
<th scope="col">MemBw Rate</th>
<td style="min-width: 100px;"
><div class="col">
<Progress
value={memBwRate[subCluster.name]}
max={subCluster.memoryBandwidth.value *
subCluster.numberOfNodes}
/>
</div></td
>
<td>
{scaleNumbers(
memBwRate[subCluster.name],
subCluster.memoryBandwidth.value * subCluster.numberOfNodes,
memBwRateUnitPrefix[subCluster.name],
)}{memBwRateUnitBase[subCluster.name]} [Max]
</td>
</tr>
</Table>
</CardBody>
</Card>
</Col>
<Col class="px-3 mt-2 mt-lg-0">
<div bind:clientWidth={plotWidths[i]}>
{#key $mainQuery.data.nodeMetrics}
<Roofline
allowSizeChange
width={plotWidths[i] - 10}
height={300}
subCluster={subCluster}
data={transformPerNodeDataForRoofline(
$mainQuery.data.nodeMetrics.filter(
(data) => data.subCluster == subCluster.name,
),
)}
/>
{/key}
</div>
</Col>
</Row>
{/each}
<hr />
<!-- User and Project Stats as Pie-Charts -->
<Row cols={{ lg: 4, md: 2, sm: 1 }}>
<Col class="p-2">
<div bind:clientWidth={colWidth}>
<h4 class="text-center">
Top Users on {cluster.charAt(0).toUpperCase() + cluster.slice(1)}
</h4>
{#key $topUserQuery.data}
{#if $topUserQuery.fetching}
<Spinner />
{:else if $topUserQuery.error}
<Card body color="danger">{$topUserQuery.error.message}</Card>
{:else}
<Pie
canvasId="hpcpie-users"
size={colWidth}
sliceLabel={topUserSelection.label}
quantities={$topUserQuery.data.topUser.map(
(tu) => tu[topUserSelection.key],
)}
entities={$topUserQuery.data.topUser.map((tu) => scrambleNames ? scramble(tu.id) : tu.id)}
/>
{/if}
{/key}
</div>
</Col>
<Col class="px-4 py-2">
{#key $topUserQuery.data}
{#if $topUserQuery.fetching}
<Spinner />
{:else if $topUserQuery.error}
<Card body color="danger">{$topUserQuery.error.message}</Card>
{:else}
<Table>
<tr class="mb-2">
<th>Legend</th>
<th>User Name</th>
<th
>Number of
<select class="p-0" bind:value={topUserSelection}>
{#each topOptions as option}
<option value={option}>
{option.label}
</option>
{/each}
</select>
</th>
</tr>
{#each $topUserQuery.data.topUser as tu, i}
<tr>
<td><Icon name="circle-fill" style="color: {colors[i]};" /></td>
<th scope="col" id="topName-{tu.id}"
><a
href="/monitoring/user/{tu.id}?cluster={cluster}&state=running"
>{scrambleNames ? scramble(tu.id) : tu.id}</a
></th
>
{#if tu?.name}
<Tooltip
target={`topName-${tu.id}`}
placement="left"
>{scrambleNames ? scramble(tu.name) : tu.name}</Tooltip
>
{/if}
<td>{tu[topUserSelection.key]}</td>
</tr>
{/each}
</Table>
{/if}
{/key}
</Col>
<Col class="p-2">
<h4 class="text-center">
Top Projects on {cluster.charAt(0).toUpperCase() + cluster.slice(1)}
</h4>
{#key $topProjectQuery.data}
{#if $topProjectQuery.fetching}
<Spinner />
{:else if $topProjectQuery.error}
<Card body color="danger">{$topProjectQuery.error.message}</Card>
{:else}
<Pie
canvasId="hpcpie-projects"
size={colWidth}
sliceLabel={topProjectSelection.label}
quantities={$topProjectQuery.data.topProjects.map(
(tp) => tp[topProjectSelection.key],
)}
entities={$topProjectQuery.data.topProjects.map((tp) => scrambleNames ? scramble(tp.id) : tp.id)}
/>
{/if}
{/key}
</Col>
<Col class="px-4 py-2">
{#key $topProjectQuery.data}
{#if $topProjectQuery.fetching}
<Spinner />
{:else if $topProjectQuery.error}
<Card body color="danger">{$topProjectQuery.error.message}</Card>
{:else}
<Table>
<tr class="mb-2">
<th>Legend</th>
<th>Project Code</th>
<th
>Number of
<select class="p-0" bind:value={topProjectSelection}>
{#each topOptions as option}
<option value={option}>
{option.label}
</option>
{/each}
</select>
</th>
</tr>
{#each $topProjectQuery.data.topProjects as tp, i}
<tr>
<td><Icon name="circle-fill" style="color: {colors[i]};" /></td>
<th scope="col"
><a
href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq"
>{scrambleNames ? scramble(tp.id) : tp.id}</a
></th
>
<td>{tp[topProjectSelection.key]}</td>
</tr>
{/each}
</Table>
{/if}
{/key}
</Col>
</Row>
<hr class="my-2" />
<!-- Static Stats as Histograms : Running Duration && Allocated Hardware Counts-->
<Row cols={{ lg: 2, md: 1 }}>
<Col class="p-2">
{#key $mainQuery.data.stats}
<Histogram
data={convert2uplot($mainQuery.data.stats[0].histDuration)}
title="Duration Distribution"
xlabel="Current Job Runtimes"
xunit="Runtime"
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
xtime
/>
{/key}
</Col>
<Col class="p-2">
{#key $mainQuery.data.stats}
<Histogram
data={convert2uplot($mainQuery.data.stats[0].histNumNodes)}
title="Number of Nodes Distribution"
xlabel="Allocated Nodes"
xunit="Nodes"
ylabel="Number of Jobs"
yunit="Jobs"
/>
{/key}
</Col>
</Row>
<Row cols={{ lg: 2, md: 1 }}>
<Col class="p-2">
{#key $mainQuery.data.stats}
<Histogram
data={convert2uplot($mainQuery.data.stats[0].histNumCores)}
title="Number of Cores Distribution"
xlabel="Allocated Cores"
xunit="Cores"
ylabel="Number of Jobs"
yunit="Jobs"
/>
{/key}
</Col>
<Col class="p-2">
{#key $mainQuery.data.stats}
<Histogram
data={convert2uplot($mainQuery.data.stats[0].histNumAccs)}
title="Number of Accelerators Distribution"
xlabel="Allocated Accs"
xunit="Accs"
ylabel="Number of Jobs"
yunit="Jobs"
/>
{/key}
</Col>
</Row>
<hr class="my-2" />
<!-- Selectable Stats as Histograms : Average Values of Running Jobs -->
{#if selectedHistograms}
<!-- Note: Ignore '#snippet' Error in IDE -->
{#snippet gridContent(item)}
<Histogram
data={convert2uplot(item.data)}
title="Distribution of '{item.metric}' averages"
xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
xunit={item.unit}
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
/>
{/snippet}
{#key $mainQuery.data.stats[0].histMetrics} <TabPane tabId="metric-dash" tab="Statistics">
<PlotGrid <CardBody>
items={$mainQuery.data.stats[0].histMetrics} <StatisticsDash {presetCluster} {useCbColors}></StatisticsDash>
itemsPerRow={2} </CardBody>
{gridContent} </TabPane>
/> </TabContent>
{/key} </Card>
{/if}
{/if}
<HistogramSelection
{cluster}
bind:isOpen={isHistogramSelectionOpen}
presetSelectedHistograms={selectedHistograms}
applyChange={(newSelection) => {
selectedHistograms = [...newSelection];
}}
/>

View File

@@ -404,6 +404,7 @@
cluster={selectedCluster} cluster={selectedCluster}
bind:isOpen={isHistogramSelectionOpen} bind:isOpen={isHistogramSelectionOpen}
presetSelectedHistograms={selectedHistograms} presetSelectedHistograms={selectedHistograms}
configName="user_view_histogramMetrics"
applyChange={(newSelection) => { applyChange={(newSelection) => {
selectedHistogramsBuffer[selectedCluster || 'all'] = [...newSelection]; selectedHistogramsBuffer[selectedCluster || 'all'] = [...newSelection];
}} }}

View File

@@ -26,7 +26,7 @@
/* Svelte 5 Props */ /* Svelte 5 Props */
let { let {
matchedCompareJobs = $bindable(0), matchedCompareJobs = $bindable(0),
metrics = ccconfig?.plot_list_selectedMetrics, metrics = getContext("cc-config")?.plot_list_selectedMetrics,
filterBuffer = [], filterBuffer = [],
} = $props(); } = $props();

View File

@@ -44,7 +44,7 @@
/* Const Init */ /* Const Init */
const clusterCockpitConfig = getContext("cc-config"); const clusterCockpitConfig = getContext("cc-config");
const lineWidth = clusterCockpitConfig.plot_general_lineWidth / window.devicePixelRatio; const lineWidth = clusterCockpitConfig?.plot_general_lineWidth / window.devicePixelRatio || 2;
const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false; const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false;
// UPLOT SERIES INIT // // UPLOT SERIES INIT //

View File

@@ -14,28 +14,59 @@
--> -->
<script module> <script module>
// http://tsitsul.in/blog/coloropt/ : 12 colors normal export const colors = {
export const colors = [ // https://www.learnui.design/tools/data-color-picker.html#divergent: 11, Shallow Green-Red
'rgb(235,172,35)', default: [
'rgb(184,0,88)', "#00876c",
'rgb(0,140,249)', "#449c6e",
'rgb(0,110,0)', "#70af6f",
'rgb(0,187,173)', "#9bc271",
'rgb(209,99,230)', "#c8d377",
'rgb(178,69,2)', "#f7e382",
'rgb(255,146,135)', "#f6c468",
'rgb(89,84,214)', "#f3a457",
'rgb(0,198,248)', "#ed834e",
'rgb(135,133,0)', "#e3614d",
'rgb(0,167,108)', "#d43d51",
'rgb(189,189,189)' ],
]; // https://www.learnui.design/tools/data-color-picker.html#palette: 12, Colorwheel-Like
alternative: [
"#0022bb",
"#ba0098",
"#fa0066",
"#ff6234",
"#ffae00",
"#b1af00",
"#67a630",
"#009753",
"#00836c",
"#006d77",
"#005671",
"#003f5c",
],
// http://tsitsul.in/blog/coloropt/ : 12 colors normal
colorblind: [
'rgb(235,172,35)',
'rgb(184,0,88)',
'rgb(0,140,249)',
'rgb(0,110,0)',
'rgb(0,187,173)',
'rgb(209,99,230)',
'rgb(178,69,2)',
'rgb(255,146,135)',
'rgb(89,84,214)',
'rgb(0,198,248)',
'rgb(135,133,0)',
'rgb(0,167,108)',
'rgb(189,189,189)',
]
}
</script> </script>
<script> <script>
/* Ignore Double Script Section Error in IDE */ // Ignore VSC IDE "One Instance Level Script" Error
import { onMount, getContext } from "svelte";
import Chart from 'chart.js/auto'; import Chart from 'chart.js/auto';
import { onMount } from 'svelte';
/* Svelte 5 Props */ /* Svelte 5 Props */
let { let {
@@ -45,21 +76,11 @@
quantities, quantities,
entities, entities,
displayLegend = false, displayLegend = false,
useAltColors = false,
} = $props(); } = $props();
/* Const Init */ /* Const Init */
const data = { const useCbColors = getContext("cc-config")?.plot_general_colorblindMode || false
labels: entities,
datasets: [
{
label: sliceLabel,
data: quantities,
fill: 1,
backgroundColor: colors.slice(0, quantities.length)
}
]
};
const options = { const options = {
maintainAspectRatio: false, maintainAspectRatio: false,
animation: false, animation: false,
@@ -70,6 +91,31 @@
} }
}; };
/* Derived */
const colorPalette = $derived.by(() => {
let c;
if (useCbColors) {
c = [...colors['colorblind']];
} else if (useAltColors) {
c = [...colors['alternative']];
} else {
c = [...colors['default']];
}
return c.slice(0, quantities.length);
})
const data = $derived({
labels: entities,
datasets: [
{
label: sliceLabel,
data: quantities,
fill: 1,
backgroundColor: colorPalette,
}
]
});
/* On Mount */ /* On Mount */
onMount(() => { onMount(() => {
new Chart( new Chart(
@@ -84,7 +130,7 @@
</script> </script>
<!-- <div style="width: 500px;"><canvas id="dimensions"></canvas></div><br/> --> <!-- <div style="width: 500px;"><canvas id="dimensions"></canvas></div><br/> -->
<div class="chart-container" style="--container-width: {size}; --container-height: {size}"> <div class="chart-container" style="--container-width: {size}px; --container-height: {size}px">
<canvas id={canvasId}></canvas> <canvas id={canvasId}></canvas>
</div> </div>

View File

@@ -3,7 +3,6 @@
Properties: Properties:
- `data [null, [], []]`: Roofline Data Structure, see below for details [Default: null] - `data [null, [], []]`: Roofline Data Structure, see below for details [Default: null]
- `renderTime Bool?`: If time information should be rendered as colored dots [Default: false]
- `allowSizeChange Bool?`: If dimensions of rendered plot can change [Default: false] - `allowSizeChange Bool?`: If dimensions of rendered plot can change [Default: false]
- `subCluster GraphQL.SubCluster?`: SubCluster Object; contains required topology information [Default: null] - `subCluster GraphQL.SubCluster?`: SubCluster Object; contains required topology information [Default: null]
- `width Number?`: Plot width (reactively adaptive) [Default: 600] - `width Number?`: Plot width (reactively adaptive) [Default: 600]
@@ -21,19 +20,22 @@
- `data[2] = [0.1, 0.15, 0.2, ...]` - `data[2] = [0.1, 0.15, 0.2, ...]`
- Color Code: Time Information (Floats from 0 to 1) (Optional) - Color Code: Time Information (Floats from 0 to 1) (Optional)
--> -->
<script> <script>
import uPlot from "uplot"; import uPlot from "uplot";
import { formatNumber } from "../units.js"; import { formatNumber } from "../units.js";
import { onMount, onDestroy } from "svelte"; import { onMount, onDestroy } from "svelte";
import { Card } from "@sveltestrap/sveltestrap"; import { Card } from "@sveltestrap/sveltestrap";
import { roundTwoDigits } from "../units.js";
/* Svelte 5 Props */ /* Svelte 5 Props */
let { let {
data = null, roofData = null,
renderTime = false, jobsData = null,
allowSizeChange = false, nodesData = null,
cluster = null,
subCluster = null, subCluster = null,
allowSizeChange = false,
useColors = true,
width = 600, width = 600,
height = 380, height = 380,
} = $props(); } = $props();
@@ -54,8 +56,27 @@
if (allowSizeChange) sizeChanged(width, height); if (allowSizeChange) sizeChanged(width, height);
}); });
// Copied Example Vars for Uplot Bubble
// https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/isPointInPath
let qt;
let hRect;
let pxRatio;
function setPxRatio() {
pxRatio = uPlot.pxRatio;
}
setPxRatio();
window.addEventListener('dppxchange', setPxRatio);
// let minSize = 6;
let maxSize = 60;
// let maxArea = Math.PI * (maxSize / 2) ** 2;
// let minArea = Math.PI * (minSize / 2) ** 2;
/* Functions */ /* Functions */
// Helper // Helper
function pointWithin(px, py, rlft, rtop, rrgt, rbtm) {
return px >= rlft && px <= rrgt && py >= rtop && py <= rbtm;
}
function getGradientR(x) { function getGradientR(x) {
if (x < 0.5) return 0; if (x < 0.5) return 0;
if (x > 0.75) return 255; if (x > 0.75) return 255;
@@ -74,8 +95,9 @@
x = 1.0 - (x - 0.25) * 4.0; x = 1.0 - (x - 0.25) * 4.0;
return Math.floor(x * 255.0); return Math.floor(x * 255.0);
} }
function getRGB(c) { function getRGB(c, transparent = false) {
return `rgb(${cbmode ? '0' : getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)})`; if (transparent) return `rgba(${cbmode ? '0' : getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)}, 0.5)`;
else return `rgb(${cbmode ? '0' : getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)})`;
} }
function nearestThousand(num) { function nearestThousand(num) {
return Math.ceil(num / 1000) * 1000; return Math.ceil(num / 1000) * 1000;
@@ -89,126 +111,492 @@
}; };
} }
// Dot Renderers // quadratic scaling (px area)
const drawColorPoints = (u, seriesIdx, idx0, idx1) => { // function getSize(value, minValue, maxValue) {
const size = 5 * devicePixelRatio; // let pct = value / maxValue;
uPlot.orient( // // clamp to min area
u, // //let area = Math.max(maxArea * pct, minArea);
seriesIdx, // let area = maxArea * pct;
( // return Math.sqrt(area / Math.PI) * 2;
series, // }
dataX,
dataY, // function getSizeMinMax(u) {
scaleX, // let minValue = Infinity;
scaleY, // let maxValue = -Infinity;
valToPosX, // for (let i = 1; i < u.series.length; i++) {
valToPosY, // let sizeData = u.data[i][2];
xOff, // for (let j = 0; j < sizeData.length; j++) {
yOff, // minValue = Math.min(minValue, sizeData[j]);
xDim, // maxValue = Math.max(maxValue, sizeData[j]);
yDim, // }
moveTo, // }
lineTo, // return [minValue, maxValue];
rect, // }
arc,
) => { // Quadtree Object (TODO: Split and Import)
class Quadtree {
constructor (x, y, w, h, l) {
let t = this;
t.x = x;
t.y = y;
t.w = w;
t.h = h;
t.l = l || 0;
t.o = [];
t.q = null;
t.MAX_OBJECTS = 10;
t.MAX_LEVELS = 4;
};
get quadtree() {
return "Implement me!";
}
split() {
let t = this,
x = t.x,
y = t.y,
w = t.w / 2,
h = t.h / 2,
l = t.l + 1;
t.q = [
// top right
new Quadtree(x + w, y, w, h, l),
// top left
new Quadtree(x, y, w, h, l),
// bottom left
new Quadtree(x, y + h, w, h, l),
// bottom right
new Quadtree(x + w, y + h, w, h, l),
];
};
quads(x, y, w, h, cb) {
let t = this,
q = t.q,
hzMid = t.x + t.w / 2,
vtMid = t.y + t.h / 2,
startIsNorth = y < vtMid,
startIsWest = x < hzMid,
endIsEast = x + w > hzMid,
endIsSouth = y + h > vtMid;
// top-right quad
startIsNorth && endIsEast && cb(q[0]);
// top-left quad
startIsWest && startIsNorth && cb(q[1]);
// bottom-left quad
startIsWest && endIsSouth && cb(q[2]);
// bottom-right quad
endIsEast && endIsSouth && cb(q[3]);
};
add(o) {
let t = this;
if (t.q != null) {
t.quads(o.x, o.y, o.w, o.h, q => {
q.add(o);
});
}
else {
let os = t.o;
os.push(o);
if (os.length > t.MAX_OBJECTS && t.l < t.MAX_LEVELS) {
t.split();
for (let i = 0; i < os.length; i++) {
let oi = os[i];
t.quads(oi.x, oi.y, oi.w, oi.h, q => {
q.add(oi);
});
}
t.o.length = 0;
}
}
};
get(x, y, w, h, cb) {
let t = this;
let os = t.o;
for (let i = 0; i < os.length; i++)
cb(os[i]);
if (t.q != null) {
t.quads(x, y, w, h, q => {
q.get(x, y, w, h, cb);
});
}
}
clear() {
this.o.length = 0;
this.q = null;
}
}
// Dot Renderer
const makeDrawPoints = (opts) => {
let {/*size, disp,*/ transparentFill, each = () => {}} = opts;
const sizeBase = 6 * pxRatio;
return (u, seriesIdx, idx0, idx1) => {
uPlot.orient(u, seriesIdx, (series, dataX, dataY, scaleX, scaleY, valToPosX, valToPosY, xOff, yOff, xDim, yDim, moveTo, lineTo, rect, arc) => {
let d = u.data[seriesIdx]; let d = u.data[seriesIdx];
let strokeWidth = 1;
let deg360 = 2 * Math.PI; let deg360 = 2 * Math.PI;
/* Alt.: Sizes based on other Data Rows */
// let sizes = disp.size.values(u, seriesIdx, idx0, idx1);
u.ctx.save();
u.ctx.rect(u.bbox.left, u.bbox.top, u.bbox.width, u.bbox.height);
u.ctx.clip();
u.ctx.lineWidth = strokeWidth;
// todo: this depends on direction & orientation
// todo: calc once per redraw, not per path
let filtLft = u.posToVal(-maxSize / 2, scaleX.key);
let filtRgt = u.posToVal(u.bbox.width / pxRatio + maxSize / 2, scaleX.key);
let filtBtm = u.posToVal(u.bbox.height / pxRatio + maxSize / 2, scaleY.key);
let filtTop = u.posToVal(-maxSize / 2, scaleY.key);
for (let i = 0; i < d[0].length; i++) { for (let i = 0; i < d[0].length; i++) {
let p = new Path2D(); if (useColors) {
u.ctx.strokeStyle = "rgb(0, 0, 0)";
// Jobs: Color based on Duration
if (jobsData) {
//u.ctx.strokeStyle = getRGB(u.data[2][i]);
u.ctx.fillStyle = getRGB(u.data[2][i], transparentFill);
// Nodes: Color based on Idle vs. Allocated
} else if (nodesData) {
// console.log('In Plot Handler NodesData', nodesData)
if (nodesData[i]?.nodeState == "idle") {
//u.ctx.strokeStyle = "rgb(0, 0, 255)";
u.ctx.fillStyle = "rgba(0, 0, 255, 0.5)";
} else if (nodesData[i]?.nodeState == "allocated") {
//u.ctx.strokeStyle = "rgb(0, 255, 0)";
u.ctx.fillStyle = "rgba(0, 255, 0, 0.5)";
} else if (nodesData[i]?.nodeState == "notindb") {
//u.ctx.strokeStyle = "rgb(0, 0, 0)";
u.ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
} else { // Fallback: All other DEFINED states
//u.ctx.strokeStyle = "rgb(255, 0, 0)";
u.ctx.fillStyle = "rgba(255, 0, 0, 0.5)";
}
}
} else {
// No Colors: Use Black
u.ctx.strokeStyle = "rgb(0, 0, 0)";
u.ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
}
// Get Values
let xVal = d[0][i]; let xVal = d[0][i];
let yVal = d[1][i]; let yVal = d[1][i];
u.ctx.strokeStyle = getRGB(u.data[2][i]);
u.ctx.fillStyle = getRGB(u.data[2][i]); // Calc Size; Alt.: size = sizes[i] * pxRatio
if ( let size = 1;
xVal >= scaleX.min &&
xVal <= scaleX.max && // Jobs: Size based on Resourcecount
yVal >= scaleY.min && if (jobsData) {
yVal <= scaleY.max const scaling = jobsData[i].numNodes > 12
) { ? 24 // Capped Dot Size
: jobsData[i].numNodes > 1
? jobsData[i].numNodes * 2 // MultiNode Scaling
: jobsData[i]?.numAcc ? jobsData[i].numAcc : jobsData[i].numNodes * 2 // Single Node or Scale by Accs
size = sizeBase + scaling
// Nodes: Size based on Jobcount
} else if (nodesData) {
size = sizeBase + (nodesData[i]?.numJobs * 1.5) // Max Jobs Scale: 8 * 1.5 = 12
};
if (xVal >= filtLft && xVal <= filtRgt && yVal >= filtBtm && yVal <= filtTop) {
let cx = valToPosX(xVal, scaleX, xDim, xOff); let cx = valToPosX(xVal, scaleX, xDim, xOff);
let cy = valToPosY(yVal, scaleY, yDim, yOff); let cy = valToPosY(yVal, scaleY, yDim, yOff);
p.moveTo(cx + size / 2, cy); u.ctx.moveTo(cx + size/2, cy);
arc(p, cx, cy, size / 2, 0, deg360); u.ctx.beginPath();
u.ctx.arc(cx, cy, size/2, 0, deg360);
u.ctx.fill();
u.ctx.stroke();
each(u, seriesIdx, i,
cx - size/2 - strokeWidth/2,
cy - size/2 - strokeWidth/2,
size + strokeWidth,
size + strokeWidth
);
} }
u.ctx.fill(p);
} }
}, u.ctx.restore();
); });
return null; return null;
};
}; };
const drawPoints = (u, seriesIdx, idx0, idx1) => { let drawPoints = makeDrawPoints({
const size = 5 * devicePixelRatio; // disp: {
uPlot.orient( // size: {
u, // // unit: 3, // raw CSS pixels
seriesIdx, // // discr: true,
( // values: (u, seriesIdx, idx0, idx1) => {
series, // /* Func to get sizes from additional subSeries [series][2...x] ([0,1] is [x,y]) */
dataX, // // TODO: only run once per setData() call
dataY, // let [minValue, maxValue] = getSizeMinMax(u);
scaleX, // return u.data[seriesIdx][2].map(v => getSize(v, minValue, maxValue));
scaleY, // },
valToPosX, // },
valToPosY, // },
xOff, transparentFill: true,
yOff, each: (u, seriesIdx, dataIdx, lft, top, wid, hgt) => {
xDim, // we get back raw canvas coords (included axes & padding). translate to the plotting area origin
yDim, lft -= u.bbox.left;
moveTo, top -= u.bbox.top;
lineTo, qt.add({x: lft, y: top, w: wid, h: hgt, sidx: seriesIdx, didx: dataIdx});
rect, },
arc, });
) => {
let d = u.data[seriesIdx]; const legendValues = (u, seriesIdx, dataIdx) => {
u.ctx.strokeStyle = getRGB(0); // when data null, it's initial schema probe (also u.status == 0)
u.ctx.fillStyle = getRGB(0); if (u.data == null || dataIdx == null || hRect == null || hRect.sidx != seriesIdx) {
let deg360 = 2 * Math.PI; return {
let p = new Path2D(); "Intensity [FLOPS/Byte]": '-',
for (let i = 0; i < d[0].length; i++) { "":'',
let xVal = d[0][i]; "Performace [GFLOPS]": '-'
let yVal = d[1][i]; };
if ( }
xVal >= scaleX.min &&
xVal <= scaleX.max && return {
yVal >= scaleY.min && "Intensity [FLOPS/Byte]": roundTwoDigits(u.data[seriesIdx][0][dataIdx]),
yVal <= scaleY.max "":'',
) { "Performace [GFLOPS]": roundTwoDigits(u.data[seriesIdx][1][dataIdx]),
let cx = valToPosX(xVal, scaleX, xDim, xOff); };
let cy = valToPosY(yVal, scaleY, yDim, yOff); };
p.moveTo(cx + size / 2, cy);
arc(p, cx, cy, size / 2, 0, deg360); // Tooltip Plugin
function tooltipPlugin({onclick, getLegendData, shiftX = 10, shiftY = 10}) {
let tooltipLeftOffset = 0;
let tooltipTopOffset = 0;
const tooltip = document.createElement("div");
// Build Manual Class By Styles
tooltip.style.fontSize = "10pt";
tooltip.style.position = "absolute";
tooltip.style.background = "#fcfcfc";
tooltip.style.display = "none";
tooltip.style.border = "2px solid black";
tooltip.style.padding = "4px";
tooltip.style.pointerEvents = "none";
tooltip.style.zIndex = "100";
tooltip.style.whiteSpace = "pre";
tooltip.style.fontFamily = "monospace";
const tipSeriesIdx = 1; // Scatter: Series IDX is always 1
let tipDataIdx = null;
// const fmtDate = uPlot.fmtDate("{M}/{D}/{YY} {h}:{mm}:{ss} {AA}");
let over;
let tooltipVisible = false;
function showTooltip() {
if (!tooltipVisible) {
tooltip.style.display = "block";
over.style.cursor = "pointer";
tooltipVisible = true;
}
}
function hideTooltip() {
if (tooltipVisible) {
tooltip.style.display = "none";
over.style.cursor = null;
tooltipVisible = false;
}
}
function setTooltip(u, i) {
showTooltip();
let top = u.valToPos(u.data[tipSeriesIdx][1][i], 'y');
let lft = u.valToPos(u.data[tipSeriesIdx][0][i], 'x');
tooltip.style.top = (tooltipTopOffset + top + shiftX) + "px";
tooltip.style.left = (tooltipLeftOffset + lft + shiftY) + "px";
if (useColors) {
// Jobs: Color based on Duration
if (jobsData) {
tooltip.style.borderColor = getRGB(u.data[2][i]);
// Nodes: Color based on Idle vs. Allocated
} else if (nodesData) {
if (nodesData[i]?.nodeState == "idle") {
tooltip.style.borderColor = "rgb(0, 0, 255)";
} else if (nodesData[i]?.nodeState == "allocated") {
tooltip.style.borderColor = "rgb(0, 255, 0)";
} else if (nodesData[i]?.nodeState == "notindb") { // Missing from DB table
tooltip.style.borderColor = "rgb(0, 0, 0)";
} else { // Fallback: All other DEFINED states
tooltip.style.borderColor = "rgb(255, 0, 0)";
} }
} }
u.ctx.fill(p); } else {
}, // No Colors: Use Black
); tooltip.style.borderColor = "rgb(0, 0, 0)";
return null; }
};
if (jobsData) {
tooltip.textContent = (
// Tooltip Content as String for Job
`Job ID: ${getLegendData(u, i).jobId}\nRuntime: ${getLegendData(u, i).duration}\nNodes: ${getLegendData(u, i).numNodes}${getLegendData(u, i)?.numAcc?`\nAccelerators: ${getLegendData(u, i).numAcc}`:''}`
);
} else if (nodesData && useColors) {
tooltip.textContent = (
// Tooltip Content as String for Node
`Host: ${getLegendData(u, i).nodeName}\nState: ${getLegendData(u, i).nodeState}\nJobs: ${getLegendData(u, i).numJobs}`
);
} else if (nodesData && !useColors) {
tooltip.textContent = (
// Tooltip Content as String for Node
`Host: ${getLegendData(u, i).nodeName}\nJobs: ${getLegendData(u, i).numJobs}`
);
}
}
return {
hooks: {
ready: [
u => {
over = u.over;
tooltipLeftOffset = parseFloat(over.style.left);
tooltipTopOffset = parseFloat(over.style.top);
u.root.querySelector(".u-wrap").appendChild(tooltip);
let clientX;
let clientY;
over.addEventListener("mousedown", e => {
clientX = e.clientX;
clientY = e.clientY;
});
over.addEventListener("mouseup", e => {
// clicked in-place
if (e.clientX == clientX && e.clientY == clientY) {
if (tipDataIdx != null) {
onclick(u, tipDataIdx);
}
}
});
}
],
setCursor: [
u => {
let i = u.legend.idxs[1];
if (i != null) {
tipDataIdx = i;
setTooltip(u, i);
} else {
tipDataIdx = null;
hideTooltip();
}
}
]
}
};
}
// Main Functions // Main Functions
function sizeChanged() { function sizeChanged() {
if (timeoutId != null) clearTimeout(timeoutId); if (timeoutId != null) clearTimeout(timeoutId);
timeoutId = setTimeout(() => { timeoutId = setTimeout(() => {
timeoutId = null; timeoutId = null;
if (uplot) uplot.destroy(); if (uplot) uplot.destroy();
render(data); render(roofData, jobsData, nodesData);
}, 200); }, 200);
} }
function render(plotData) { function render(roofData, jobsData, nodesData) {
if (plotData) { let plotTitle = "CPU Roofline Diagram";
if (jobsData) plotTitle = "Job Average Roofline Diagram";
if (nodesData) plotTitle = "Node Average Roofline Diagram";
if (roofData) {
const opts = { const opts = {
title: "CPU Roofline Diagram", title: plotTitle,
mode: 2, mode: 2,
width: width, width: width,
height: height, height: height,
legend: { legend: {
show: false, show: true,
},
cursor: {
dataIdx: (u, seriesIdx) => {
if (seriesIdx == 1) {
hRect = null;
let dist = Infinity;
let area = Infinity;
let cx = u.cursor.left * pxRatio;
let cy = u.cursor.top * pxRatio;
qt.get(cx, cy, 1, 1, o => {
if (pointWithin(cx, cy, o.x, o.y, o.x + o.w, o.y + o.h)) {
let ocx = o.x + o.w / 2;
let ocy = o.y + o.h / 2;
let dx = ocx - cx;
let dy = ocy - cy;
let d = Math.sqrt(dx ** 2 + dy ** 2);
// test against radius for actual hover
if (d <= o.w / 2) {
let a = o.w * o.h;
// prefer smallest
if (a < area) {
area = a;
dist = d;
hRect = o;
}
// only hover bbox with closest distance
else if (a == area && d <= dist) {
dist = d;
hRect = o;
}
}
}
});
}
return hRect && seriesIdx == hRect.sidx ? hRect.didx : null;
},
/* Render "Fill" on Data Point Hover: Works in Example Bubble, does not work here? Guess: Interference with tooltip */
// points: {
// size: (u, seriesIdx) => {
// return hRect && seriesIdx == hRect.sidx ? hRect.w / pxRatio : 0;
// }
// },
/* Make all non-focused series semi-transparent: Useless unless more than one series rendered */
// focus: {
// prox: 1e3,
// alpha: 0.3,
// dist: (u, seriesIdx) => {
// let prox = (hRect?.sidx === seriesIdx ? 0 : Infinity);
// return prox;
// },
// },
drag: { // Activates Zoom: Only one Dimension; YX Breaks Zoom Reset (Reason TBD)
x: true,
y: false
},
}, },
cursor: { drag: { x: false, y: false } },
axes: [ axes: [
{ {
label: "Intensity [FLOPS/Byte]", label: "Intensity [FLOPS/Byte]",
@@ -228,7 +616,7 @@
}, },
y: { y: {
range: [ range: [
1.0, 0.01,
subCluster?.flopRateSimd?.value subCluster?.flopRateSimd?.value
? nearestThousand(subCluster.flopRateSimd.value) ? nearestThousand(subCluster.flopRateSimd.value)
: 10000, : 10000,
@@ -237,12 +625,36 @@
log: 10, // log exp log: 10, // log exp
}, },
}, },
series: [{}, { paths: renderTime ? drawColorPoints : drawPoints }], series: [
null,
{
/* Facets: Define Purpose of Sub-Arrays in Series-Array, e.g. x, y, size, label, color, ... */
// facets: [
// {
// scale: 'x',
// auto: true,
// },
// {
// scale: 'y',
// auto: true,
// }
// ],
paths: drawPoints,
values: legendValues
}
],
hooks: { hooks: {
// setSeries: [ (u, seriesIdx) => console.log('setSeries', seriesIdx) ],
// setLegend: [ u => console.log('setLegend', u.legend.idxs) ],
drawClear: [ drawClear: [
(u) => { (u) => {
qt = qt || new Quadtree(0, 0, u.bbox.width, u.bbox.height);
qt.clear();
// force-clear the path cache to cause drawBars() to rebuild new quadtree
u.series.forEach((s, i) => { u.series.forEach((s, i) => {
if (i > 0) s._paths = null; if (i > 0)
s._paths = null;
}); });
}, },
], ],
@@ -334,30 +746,92 @@
// Reset grid lineWidth // Reset grid lineWidth
u.ctx.lineWidth = 0.15; u.ctx.lineWidth = 0.15;
} }
if (renderTime) {
// The Color Scale For Time Information /* Render Scales */
const posX = u.valToPos(0.1, "x", true) if (useColors) {
const posXLimit = u.valToPos(100, "x", true) // Jobs: The Color Scale For Time Information
const posY = u.valToPos(14000.0, "y", true) if (jobsData) {
u.ctx.fillStyle = 'black' const posX = u.valToPos(0.1, "x", true)
u.ctx.fillText('Start', posX, posY) const posXLimit = u.valToPos(100, "x", true)
const start = posX + 10 const posY = u.valToPos(17500.0, "y", true)
for (let x = start; x < posXLimit; x += 10) { u.ctx.fillStyle = 'black'
u.ctx.fillText('0 Hours', posX, posY)
const start = posX + 10
for (let x = start; x < posXLimit; x += 10) {
let c = (x - start) / (posXLimit - start) let c = (x - start) / (posXLimit - start)
u.ctx.fillStyle = getRGB(c) u.ctx.fillStyle = getRGB(c)
u.ctx.beginPath() u.ctx.beginPath()
u.ctx.arc(x, posY, 3, 0, Math.PI * 2, false) u.ctx.arc(x, posY, 3, 0, Math.PI * 2, false)
u.ctx.fill() u.ctx.fill()
}
u.ctx.fillStyle = 'black'
u.ctx.fillText('24 Hours', posXLimit + 55, posY)
}
// Nodes: The Colors Of NodeStates
if (nodesData) {
const posY = u.valToPos(17500.0, "y", true)
const posAllocDot = u.valToPos(0.03, "x", true)
const posAllocText = posAllocDot + 60
const posIdleDot = u.valToPos(0.3, "x", true)
const posIdleText = posIdleDot + 30
const posOtherDot = u.valToPos(3, "x", true)
const posOtherText = posOtherDot + 40
const posMissingDot = u.valToPos(30, "x", true)
const posMissingText = posMissingDot + 80
u.ctx.fillStyle = "rgb(0, 255, 0)"
u.ctx.beginPath()
u.ctx.arc(posAllocDot, posY, 3, 0, Math.PI * 2, false)
u.ctx.fill()
u.ctx.fillStyle = 'black'
u.ctx.fillText('Allocated', posAllocText, posY)
u.ctx.fillStyle = "rgb(0, 0, 255)"
u.ctx.beginPath()
u.ctx.arc(posIdleDot, posY, 3, 0, Math.PI * 2, false)
u.ctx.fill()
u.ctx.fillStyle = 'black'
u.ctx.fillText('Idle', posIdleText, posY)
u.ctx.fillStyle = "rgb(255, 0, 0)"
u.ctx.beginPath()
u.ctx.arc(posOtherDot, posY, 3, 0, Math.PI * 2, false)
u.ctx.fill()
u.ctx.fillStyle = 'black'
u.ctx.fillText('Other', posOtherText, posY)
u.ctx.fillStyle = 'black'
u.ctx.beginPath()
u.ctx.arc(posMissingDot, posY, 3, 0, Math.PI * 2, false)
u.ctx.fill()
u.ctx.fillText('Missing in DB', posMissingText, posY)
} }
u.ctx.fillStyle = 'black'
u.ctx.fillText('End', posXLimit + 23, posY)
} }
}, },
], ],
}, },
// cursor: { drag: { x: true, y: true } } // Activate zoom plugins: [
tooltipPlugin({
onclick(u, dataIdx) {
if (jobsData) {
window.open(`/monitoring/job/${jobsData[dataIdx].id}`)
} else if (nodesData) {
window.open(`/monitoring/node/${cluster}/${nodesData[dataIdx].nodeName}`)
}
},
getLegendData: (u, dataIdx) => {
if (jobsData) {
return jobsData[dataIdx]
} else if (nodesData) {
return nodesData[dataIdx]
}
}
}),
],
}; };
uplot = new uPlot(opts, plotData, plotWrapper); uplot = new uPlot(opts, roofData, plotWrapper);
} else { } else {
// console.log("No data for roofline!"); // console.log("No data for roofline!");
} }
@@ -365,7 +839,7 @@
/* On Mount */ /* On Mount */
onMount(() => { onMount(() => {
render(data); render(roofData, jobsData, nodesData);
}); });
/* On Destroy */ /* On Destroy */
@@ -375,10 +849,8 @@
}); });
</script> </script>
{#if data != null} {#if roofData != null}
<div bind:this={plotWrapper} class="p-2"></div> <div bind:this={plotWrapper} class="p-2"></div>
{:else} {:else}
<Card class="mx-4" body color="warning">Cannot render roofline: No data!</Card <Card class="mx-4" body color="warning">Cannot render roofline: No data!</Card>
>
{/if} {/if}

View File

@@ -0,0 +1,384 @@
<!--
@component Roofline Model Plot based on uPlot
Properties:
- `data [null, [], []]`: Roofline Data Structure, see below for details [Default: null]
- `renderTime Bool?`: If time information should be rendered as colored dots [Default: false]
- `allowSizeChange Bool?`: If dimensions of rendered plot can change [Default: false]
- `subCluster GraphQL.SubCluster?`: SubCluster Object; contains required topology information [Default: null]
- `width Number?`: Plot width (reactively adaptive) [Default: 600]
- `height Number?`: Plot height (reactively adaptive) [Default: 380]
Data Format:
- `data = [null, [], []]`
- Index 0: null-axis required for scatter
- Index 1: Array of XY-Arrays for Scatter
- Index 2: Optional Time Info
- `data[1][0] = [100, 200, 500, ...]`
- X Axis: Intensity (Vals up to clusters' flopRateScalar value)
- `data[1][1] = [1000, 2000, 1500, ...]`
- Y Axis: Performance (Vals up to clusters' flopRateSimd value)
- `data[2] = [0.1, 0.15, 0.2, ...]`
- Color Code: Time Information (Floats from 0 to 1) (Optional)
-->
<script>
import uPlot from "uplot";
import { formatNumber } from "../units.js";
import { onMount, onDestroy } from "svelte";
import { Card } from "@sveltestrap/sveltestrap";
/* Svelte 5 Props */
let {
data = null,
renderTime = false,
allowSizeChange = false,
subCluster = null,
width = 600,
height = 380,
} = $props();
/* Const Init */
const lineWidth = clusterCockpitConfig?.plot_general_lineWidth || 2;
const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false;
/* Var Init */
let timeoutId = null;
/* State Init */
let plotWrapper = $state(null);
let uplot = $state(null);
/* Effect */
$effect(() => {
if (allowSizeChange) sizeChanged(width, height);
});
/* Functions */
// Helper
// Red channel of the duration gradient: fully off below 0.5, fully
// saturated above 0.75, linear ramp in between.
function getGradientR(x) {
  if (x > 0.75) {
    return 255;
  }
  if (x < 0.5) {
    return 0;
  }
  const ramp = (x - 0.5) * 4.0;
  return Math.floor(ramp * 255.0);
}
// Green channel of the duration gradient: saturated across the middle
// band (0.25..0.75), ramping up on the left edge and down on the right.
function getGradientG(x) {
  const inMiddleBand = x > 0.25 && x < 0.75;
  if (inMiddleBand) {
    return 255;
  }
  let level;
  if (x < 0.25) {
    level = x * 4.0;
  } else {
    level = 1.0 - (x - 0.75) * 4.0;
  }
  return Math.floor(level * 255.0);
}
// Blue channel of the duration gradient: fully saturated below 0.25,
// fully off above 0.5, linear ramp down in between.
function getGradientB(x) {
  if (x > 0.5) {
    return 0;
  }
  if (x < 0.25) {
    return 255;
  }
  const ramp = 1.0 - (x - 0.25) * 4.0;
  return Math.floor(ramp * 255.0);
}
// Compose a CSS rgb() string from the gradient channels; in colorblind
// mode the red channel is forced to '0'.
function getRGB(c) {
  const r = cbmode ? '0' : getGradientR(c);
  const g = getGradientG(c);
  const b = getGradientB(c);
  return `rgb(${r}, ${g}, ${b})`;
}
// Round a value up to the next multiple of 1000 (used for y-axis limits).
function nearestThousand(num) {
  const thousands = Math.ceil(num / 1000);
  return thousands * 1000;
}
// Intersection point of the two (infinite) lines through (x1,y1)-(x2,y2)
// and (x3,y3)-(x4,y4), parameterized along the first line.
// NOTE(review): no guard for parallel lines (denominator 0 yields NaN/Inf
// coordinates) — matches original behavior.
function lineIntersect(x1, y1, x2, y2, x3, y3, x4, y4) {
  const denom = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
  const t = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / denom;
  return {
    x: x1 + t * (x2 - x1),
    y: y1 + t * (y2 - y1),
  };
}
// Dot Renderers
// uPlot series `paths` renderer: draws each scatter point as a filled
// circle colored by its duration value (u.data[2][i], 0..1) via getRGB.
// Points outside the current scale ranges are skipped.
// Returns null so uPlot does not stroke a default path.
const drawColorPoints = (u, seriesIdx, idx0, idx1) => {
  // Point diameter in canvas pixels, scaled for HiDPI displays.
  const size = 5 * devicePixelRatio;
  uPlot.orient(
    u,
    seriesIdx,
    (
      series,
      dataX,
      dataY,
      scaleX,
      scaleY,
      valToPosX,
      valToPosY,
      xOff,
      yOff,
      xDim,
      yDim,
      moveTo,
      lineTo,
      rect,
      arc,
    ) => {
      let d = u.data[seriesIdx];
      let deg360 = 2 * Math.PI;
      // One Path2D per point: each point gets its own fill color, so
      // paths cannot be batched as in drawPoints.
      for (let i = 0; i < d[0].length; i++) {
        let p = new Path2D();
        let xVal = d[0][i];
        let yVal = d[1][i];
        // Color from the third data array (normalized duration).
        u.ctx.strokeStyle = getRGB(u.data[2][i]);
        u.ctx.fillStyle = getRGB(u.data[2][i]);
        if (
          xVal >= scaleX.min &&
          xVal <= scaleX.max &&
          yVal >= scaleY.min &&
          yVal <= scaleY.max
        ) {
          let cx = valToPosX(xVal, scaleX, xDim, xOff);
          let cy = valToPosY(yVal, scaleY, yDim, yOff);
          p.moveTo(cx + size / 2, cy);
          arc(p, cx, cy, size / 2, 0, deg360);
        }
        // Out-of-range points leave p empty; filling it is a no-op.
        u.ctx.fill(p);
      }
    },
  );
  return null;
};
// uPlot series `paths` renderer: draws all scatter points as circles in a
// single color (getRGB(0)) using one shared Path2D, filled once at the
// end. Points outside the current scale ranges are skipped.
// Returns null so uPlot does not stroke a default path.
const drawPoints = (u, seriesIdx, idx0, idx1) => {
  // Point diameter in canvas pixels, scaled for HiDPI displays.
  const size = 5 * devicePixelRatio;
  uPlot.orient(
    u,
    seriesIdx,
    (
      series,
      dataX,
      dataY,
      scaleX,
      scaleY,
      valToPosX,
      valToPosY,
      xOff,
      yOff,
      xDim,
      yDim,
      moveTo,
      lineTo,
      rect,
      arc,
    ) => {
      let d = u.data[seriesIdx];
      // Single color for every point, set once before the loop.
      u.ctx.strokeStyle = getRGB(0);
      u.ctx.fillStyle = getRGB(0);
      let deg360 = 2 * Math.PI;
      let p = new Path2D();
      for (let i = 0; i < d[0].length; i++) {
        let xVal = d[0][i];
        let yVal = d[1][i];
        if (
          xVal >= scaleX.min &&
          xVal <= scaleX.max &&
          yVal >= scaleY.min &&
          yVal <= scaleY.max
        ) {
          let cx = valToPosX(xVal, scaleX, xDim, xOff);
          let cy = valToPosY(yVal, scaleY, yDim, yOff);
          p.moveTo(cx + size / 2, cy);
          arc(p, cx, cy, size / 2, 0, deg360);
        }
      }
      // All circles accumulated into one path: a single fill call.
      u.ctx.fill(p);
    },
  );
  return null;
};
// Main Functions
// Debounced re-render on dimension change: coalesce rapid resize events
// into a single plot rebuild after 200 ms of quiet.
function sizeChanged() {
  if (timeoutId != null) {
    clearTimeout(timeoutId);
  }
  timeoutId = setTimeout(() => {
    timeoutId = null;
    if (uplot) {
      uplot.destroy();
    }
    render(data);
  }, 200);
}
// Build and mount the uPlot roofline chart into `plotWrapper`.
// - plotData: [null, [x[], y[]], c[]?] scatter structure (see component docs)
// - When `subCluster` topology is set, a post-draw hook paints the memory
//   and compute roofs (scalar + SIMD horizontals and the bandwidth diagonal).
// - When `renderTime` is set, a duration color scale legend is drawn and
//   points are rendered via drawColorPoints instead of drawPoints.
// Does nothing (keeps any previous plot) when plotData is null.
function render(plotData) {
  if (plotData) {
    const opts = {
      title: "CPU Roofline Diagram",
      mode: 2,
      width: width,
      height: height,
      legend: {
        show: false,
      },
      cursor: { drag: { x: false, y: false } },
      axes: [
        {
          label: "Intensity [FLOPS/Byte]",
          values: (u, vals) => vals.map((v) => formatNumber(v)),
        },
        {
          // Fixed typo: was "Performace"
          label: "Performance [GFLOPS]",
          values: (u, vals) => vals.map((v) => formatNumber(v)),
        },
      ],
      scales: {
        x: {
          time: false,
          range: [0.01, 1000],
          distr: 3, // Render as log
          log: 10, // log exp
        },
        y: {
          range: [
            1.0,
            subCluster?.flopRateSimd?.value
              ? nearestThousand(subCluster.flopRateSimd.value)
              : 10000,
          ],
          distr: 3, // Render as log
          log: 10, // log exp
        },
      },
      series: [{}, { paths: renderTime ? drawColorPoints : drawPoints }],
      hooks: {
        drawClear: [
          (u) => {
            // Invalidate cached paths so the renderer rebuilds points.
            u.series.forEach((s, i) => {
              if (i > 0) s._paths = null;
            });
          },
        ],
        draw: [
          (u) => {
            // draw roofs when subCluster set
            if (subCluster != null) {
              const padding = u._padding; // [top, right, bottom, left]
              u.ctx.strokeStyle = "black";
              u.ctx.lineWidth = lineWidth;
              u.ctx.beginPath();
              // y-intercept of the bandwidth diagonal at x = 0.01
              const ycut = 0.01 * subCluster.memoryBandwidth.value;
              const scalarKnee =
                (subCluster.flopRateScalar.value - ycut) /
                subCluster.memoryBandwidth.value;
              const simdKnee =
                (subCluster.flopRateSimd.value - ycut) /
                subCluster.memoryBandwidth.value;
              const scalarKneeX = u.valToPos(scalarKnee, "x", true), // Value, axis, toCanvasPixels
                simdKneeX = u.valToPos(simdKnee, "x", true),
                flopRateScalarY = u.valToPos(
                  subCluster.flopRateScalar.value,
                  "y",
                  true,
                ),
                flopRateSimdY = u.valToPos(
                  subCluster.flopRateSimd.value,
                  "y",
                  true,
                );
              if (
                scalarKneeX <
                width * window.devicePixelRatio -
                  padding[1] * window.devicePixelRatio
              ) {
                // Lower horizontal roofline
                u.ctx.moveTo(scalarKneeX, flopRateScalarY);
                u.ctx.lineTo(
                  width * window.devicePixelRatio -
                    padding[1] * window.devicePixelRatio,
                  flopRateScalarY,
                );
              }
              if (
                simdKneeX <
                width * window.devicePixelRatio -
                  padding[1] * window.devicePixelRatio
              ) {
                // Top horizontal roofline
                u.ctx.moveTo(simdKneeX, flopRateSimdY);
                u.ctx.lineTo(
                  width * window.devicePixelRatio -
                    padding[1] * window.devicePixelRatio,
                  flopRateSimdY,
                );
              }
              let x1 = u.valToPos(0.01, "x", true),
                y1 = u.valToPos(ycut, "y", true);
              let x2 = u.valToPos(simdKnee, "x", true),
                y2 = flopRateSimdY;
              // Clip the diagonal at the x-axis (y = 1.0) so it does not
              // extend below the visible plot area.
              let xAxisIntersect = lineIntersect(
                x1,
                y1,
                x2,
                y2,
                u.valToPos(0.01, "x", true),
                u.valToPos(1.0, "y", true), // X-Axis Start Coords
                u.valToPos(1000, "x", true),
                u.valToPos(1.0, "y", true), // X-Axis End Coords
              );
              if (xAxisIntersect.x > x1) {
                x1 = xAxisIntersect.x;
                y1 = xAxisIntersect.y;
              }
              // Diagonal
              u.ctx.moveTo(x1, y1);
              u.ctx.lineTo(x2, y2);
              u.ctx.stroke();
              // Reset grid lineWidth
              u.ctx.lineWidth = 0.15;
            }
            if (renderTime) {
              // The Color Scale For Time Information
              const posX = u.valToPos(0.1, "x", true)
              const posXLimit = u.valToPos(100, "x", true)
              const posY = u.valToPos(14000.0, "y", true)
              u.ctx.fillStyle = 'black'
              u.ctx.fillText('Start', posX, posY)
              const start = posX + 10
              // One dot per 10 canvas pixels, colored along the gradient.
              for (let x = start; x < posXLimit; x += 10) {
                let c = (x - start) / (posXLimit - start)
                u.ctx.fillStyle = getRGB(c)
                u.ctx.beginPath()
                u.ctx.arc(x, posY, 3, 0, Math.PI * 2, false)
                u.ctx.fill()
              }
              u.ctx.fillStyle = 'black'
              u.ctx.fillText('End', posXLimit + 23, posY)
            }
          },
        ],
      },
      // cursor: { drag: { x: true, y: true } } // Activate zoom
    };
    uplot = new uPlot(opts, plotData, plotWrapper);
  } else {
    // console.log("No data for roofline!");
  }
}
/* On Mount */
onMount(() => {
render(data);
});
/* On Destroy */
onDestroy(() => {
if (uplot) uplot.destroy();
if (timeoutId != null) clearTimeout(timeoutId);
});
</script>
{#if data != null}
<div bind:this={plotWrapper} class="p-2"></div>
{:else}
<Card class="mx-4" body color="warning">Cannot render roofline: No data!</Card
>
{/if}

View File

@@ -3,8 +3,9 @@
Properties: Properties:
- `cluster String`: Currently selected cluster - `cluster String`: Currently selected cluster
- `selectedHistograms [String]`: The currently selected metrics to display as histogram
- `ìsOpen Bool`: Is selection opened [Bindable] - `ìsOpen Bool`: Is selection opened [Bindable]
- `configName String`: The config id string to be updated in database on selection change
- `presetSelectedHistograms [String]`: The currently selected metrics to display as histogram
- `applyChange Func`: The callback function to apply current selection - `applyChange Func`: The callback function to apply current selection
--> -->
@@ -25,6 +26,7 @@
let { let {
cluster, cluster,
isOpen = $bindable(), isOpen = $bindable(),
configName,
presetSelectedHistograms, presetSelectedHistograms,
applyChange applyChange
} = $props(); } = $props();
@@ -67,8 +69,8 @@
applyChange(selectedHistograms) applyChange(selectedHistograms)
updateConfiguration({ updateConfiguration({
name: cluster name: cluster
? `user_view_histogramMetrics:${cluster}` ? `${configName}:${cluster}`
: "user_view_histogramMetrics", : configName,
value: selectedHistograms, value: selectedHistograms,
}); });
} }

View File

@@ -96,9 +96,9 @@
function printAvailability(metric, cluster) { function printAvailability(metric, cluster) {
const avail = globalMetrics.find((gm) => gm.name === metric)?.availability const avail = globalMetrics.find((gm) => gm.name === metric)?.availability
if (!cluster) { if (!cluster) {
return avail.map((av) => av.cluster).join(',') return avail.map((av) => av.cluster).join(', ')
} else { } else {
return avail.find((av) => av.cluster === cluster).subClusters.join(',') return avail.find((av) => av.cluster === cluster).subClusters.join(', ')
} }
} }
@@ -208,7 +208,7 @@
/> />
{/if} {/if}
{metric} {metric}
<span style="float: right;"> <span style="float: right; text-align: justify;">
{printAvailability(metric, cluster)} {printAvailability(metric, cluster)}
</span> </span>
</li> </li>

View File

@@ -19,7 +19,7 @@
import { import {
transformDataForRoofline, transformDataForRoofline,
} from "../generic/utils.js"; } from "../generic/utils.js";
import Roofline from "../generic/plots/Roofline.svelte"; import Roofline from "../generic/plots/RooflineLegacy.svelte";
/* Svelte 5 Props */ /* Svelte 5 Props */
let { let {

View File

@@ -5,7 +5,7 @@ import Status from './Status.root.svelte'
mount(Status, { mount(Status, {
target: document.getElementById('svelte-app'), target: document.getElementById('svelte-app'),
props: { props: {
cluster: infos.cluster, presetCluster: infos.cluster,
}, },
context: new Map([ context: new Map([
['cc-config', clusterCockpitConfig] ['cc-config', clusterCockpitConfig]

View File

@@ -0,0 +1,159 @@
<!--
@component Cluster status histogram view; renders metric distribution histograms for currently running jobs
Properties:
- `presetCluster String`: The cluster to show status information for
-->
<script>
import { getContext } from "svelte";
import {
Row,
Col,
Spinner,
Card,
Icon,
Button,
} from "@sveltestrap/sveltestrap";
import {
queryStore,
gql,
getContextClient,
} from "@urql/svelte";
import {
init,
convert2uplot,
} from "../generic/utils.js";
import PlotGrid from "../generic/PlotGrid.svelte";
import Histogram from "../generic/plots/Histogram.svelte";
import HistogramSelection from "../generic/select/HistogramSelection.svelte";
import Refresher from "../generic/helper/Refresher.svelte";
/* Svelte 5 Props */
let {
presetCluster
} = $props();
/* Const Init */
const { query: initq } = init();
const ccconfig = getContext("cc-config");
const client = getContextClient();
/* State Init */
let cluster = $state(presetCluster);
// Histogram
let isHistogramSelectionOpen = $state(false);
let from = $state(new Date(Date.now() - (30 * 24 * 60 * 60 * 1000))); // Simple way to retrigger GQL: Jobs Started last Month
let to = $state(new Date(Date.now()));
/* Derived */
let selectedHistograms = $derived(cluster
? ccconfig[`status_view_selectedHistograms:${cluster}`] || ( ccconfig['status_view_selectedHistograms'] || [] )
: ccconfig['status_view_selectedHistograms'] || []);
// Note: nodeMetrics are requested on configured $timestep resolution
const metricStatusQuery = $derived(queryStore({
client: client,
query: gql`
query (
$filter: [JobFilter!]!
$selectedHistograms: [String!]
) {
jobsStatistics(filter: $filter, metrics: $selectedHistograms) {
histMetrics {
metric
unit
data {
min
max
count
bin
}
}
}
}
`,
variables: {
filter: [{ state: ["running"] }, { cluster: { eq: cluster}}, {startTime: { from, to }}],
selectedHistograms: selectedHistograms,
},
}));
</script>
<!-- Loading indicators & Metric Select -->
<Row class="justify-content-between">
<Col class="mb-2 mb-md-0" xs="12" md="5" lg="4" xl="3">
<Button
outline
color="secondary"
onclick={() => (isHistogramSelectionOpen = true)}
>
<Icon name="bar-chart-line" /> Select Histograms
</Button>
</Col>
<Col xs="12" md="5" lg="4" xl="3">
<Refresher
initially={120}
onRefresh={() => {
from = new Date(Date.now() - (30 * 24 * 60 * 60 * 1000)); // Triggers GQL
to = new Date(Date.now());
}}
/>
</Col>
</Row>
<Row cols={1} class="text-center mt-3">
<Col>
{#if $initq.fetching || $metricStatusQuery.fetching}
<Spinner />
{:else if $initq.error}
<Card body color="danger">{$initq.error.message}</Card>
{:else}
<!-- ... -->
{/if}
</Col>
</Row>
{#if $metricStatusQuery.error}
<Row cols={1}>
<Col>
<Card body color="danger">{$metricStatusQuery.error.message}</Card>
</Col>
</Row>
{/if}
{#if $initq.data && $metricStatusQuery.data}
<!-- Selectable Stats as Histograms : Average Values of Running Jobs -->
{#if selectedHistograms}
<!-- Note: Ignore '#snippet' Error in IDE -->
{#snippet gridContent(item)}
<Histogram
data={convert2uplot(item.data)}
title="Distribution of '{item.metric}' averages"
xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
xunit={item.unit}
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
/>
{/snippet}
{#key $metricStatusQuery.data.jobsStatistics[0].histMetrics}
<PlotGrid
items={$metricStatusQuery.data.jobsStatistics[0].histMetrics}
itemsPerRow={2}
{gridContent}
/>
{/key}
{/if}
{/if}
<HistogramSelection
{cluster}
bind:isOpen={isHistogramSelectionOpen}
presetSelectedHistograms={selectedHistograms}
configName="status_view_selectedHistograms"
applyChange={(newSelection) => {
selectedHistograms = [...newSelection];
}}
/>

View File

@@ -0,0 +1,580 @@
<!--
@component Main cluster status view component; renders current system-usage information
Properties:
- `presetCluster String`: The cluster to show status information for
-->
<script>
import {
Row,
Col,
Card,
CardHeader,
CardTitle,
CardBody,
Table,
Progress,
Icon,
} from "@sveltestrap/sveltestrap";
import {
queryStore,
gql,
getContextClient,
} from "@urql/svelte";
import {
init,
} from "../generic/utils.js";
import { scaleNumbers, formatTime } from "../generic/units.js";
import Refresher from "../generic/helper/Refresher.svelte";
import Roofline from "../generic/plots/Roofline.svelte";
import Pie, { colors } from "../generic/plots/Pie.svelte";
/* Svelte 5 Props */
let {
presetCluster,
useCbColors = false,
useAltColors = false,
} = $props();
/* Const Init */
const { query: initq } = init();
const client = getContextClient();
/* State Init */
let cluster = $state(presetCluster);
let pieWidth = $state(0);
let plotWidths = $state([]);
let from = $state(new Date(Date.now() - 5 * 60 * 1000));
let to = $state(new Date(Date.now()));
// Bar Gauges
let allocatedNodes = $state({});
let allocatedAccs = $state({});
let flopRate = $state({});
let flopRateUnitPrefix = $state({});
let flopRateUnitBase = $state({});
let memBwRate = $state({});
let memBwRateUnitPrefix = $state({});
let memBwRateUnitBase = $state({});
// Plain Infos
let runningJobs = $state({});
let activeUsers = $state({});
let totalAccs = $state({});
/* Derived */
// Accumulated NodeStates for Piecharts
const nodesStateCounts = $derived(queryStore({
client: client,
query: gql`
query ($filter: [NodeFilter!]) {
nodeStates(filter: $filter) {
state
count
}
}
`,
variables: {
filter: { cluster: { eq: cluster }}
},
}));
const refinedStateData = $derived.by(() => {
return $nodesStateCounts?.data?.nodeStates.filter((e) => ['allocated', 'reserved', 'idle', 'mixed','down', 'unknown'].includes(e.state))
});
const refinedHealthData = $derived.by(() => {
return $nodesStateCounts?.data?.nodeStates.filter((e) => ['full', 'partial', 'failed'].includes(e.state))
});
// Note: nodeMetrics are requested on configured $timestep resolution
// Result: The latest 5 minutes (datapoints) for each node independent of job
const statusQuery = $derived(queryStore({
client: client,
query: gql`
query (
$cluster: String!
$metrics: [String!]
$from: Time!
$to: Time!
$jobFilter: [JobFilter!]!
$nodeFilter: [NodeFilter!]!
$paging: PageRequest!
$sorting: OrderByInput!
) {
# Node 5 Minute Averages for Roofline
nodeMetrics(
cluster: $cluster
metrics: $metrics
from: $from
to: $to
) {
host
subCluster
metrics {
name
metric {
series {
statistics {
avg
}
}
}
}
}
# Running Job Metric Average for Rooflines
jobsMetricStats(filter: $jobFilter, metrics: $metrics) {
id
jobId
duration
numNodes
numAccelerators
subCluster
stats {
name
data {
avg
}
}
}
# Get Jobs for Per-Node Counts
jobs(filter: $jobFilter, order: $sorting, page: $paging) {
items {
jobId
resources {
hostname
}
}
count
}
# Only counts shared nodes once
allocatedNodes(cluster: $cluster) {
name
count
}
# Get States for Node Roofline; $sorting unused in backend: Use placeholder
nodes(filter: $nodeFilter, order: $sorting) {
count
items {
hostname
cluster
subCluster
nodeState
}
}
# totalNodes includes multiples if shared jobs
jobsStatistics(
filter: $jobFilter
page: $paging
sortBy: TOTALJOBS
groupBy: SUBCLUSTER
) {
id
totalJobs
totalUsers
totalAccs
}
}
`,
variables: {
cluster: cluster,
metrics: ["flops_any", "mem_bw"], // Fixed names for roofline and status bars
from: from.toISOString(),
to: to.toISOString(),
jobFilter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
nodeFilter: { cluster: { eq: cluster }},
paging: { itemsPerPage: -1, page: 1 }, // Get all: -1
sorting: { field: "startTime", type: "col", order: "DESC" }
},
}));
/* Effects */
$effect(() => {
if ($initq.data && $statusQuery.data) {
let subClusters = $initq.data.clusters.find(
(c) => c.name == cluster,
).subClusters;
for (let subCluster of subClusters) {
// Allocations
allocatedNodes[subCluster.name] =
$statusQuery.data.allocatedNodes.find(
({ name }) => name == subCluster.name,
)?.count || 0;
allocatedAccs[subCluster.name] =
$statusQuery.data.jobsStatistics.find(
({ id }) => id == subCluster.name,
)?.totalAccs || 0;
// Infos
activeUsers[subCluster.name] =
$statusQuery.data.jobsStatistics.find(
({ id }) => id == subCluster.name,
)?.totalUsers || 0;
runningJobs[subCluster.name] =
$statusQuery.data.jobsStatistics.find(
({ id }) => id == subCluster.name,
)?.totalJobs || 0;
totalAccs[subCluster.name] =
(subCluster?.numberOfNodes * subCluster?.topology?.accelerators?.length) || null;
// Keymetrics
flopRate[subCluster.name] =
Math.floor(
sumUp($statusQuery.data.nodeMetrics, subCluster.name, "flops_any") *
100,
) / 100;
flopRateUnitPrefix[subCluster.name] = subCluster.flopRateSimd.unit.prefix;
flopRateUnitBase[subCluster.name] = subCluster.flopRateSimd.unit.base;
memBwRate[subCluster.name] =
Math.floor(
sumUp($statusQuery.data.nodeMetrics, subCluster.name, "mem_bw") * 100,
) / 100;
memBwRateUnitPrefix[subCluster.name] =
subCluster.memoryBandwidth.unit.prefix;
memBwRateUnitBase[subCluster.name] = subCluster.memoryBandwidth.unit.base;
}
}
});
/* Const Functions */
// Sum the per-node series averages of `metric` over all nodes belonging
// to `subcluster`; nodes without a matching metric contribute zero.
const sumUp = (data, subcluster, metric) => {
  let total = 0;
  for (const node of data) {
    if (node.subCluster != subcluster) {
      continue;
    }
    const entry = node.metrics.find((m) => m.name == metric);
    total += entry?.metric?.series[0]?.statistics?.avg || 0;
  }
  return total;
};
/* Functions */
// Map running-job metric averages into the roofline scatter structure
// [null, [intensity[], flops[]], color[]] (see roofline.svelte).
// - color[i] encodes the job duration as a fraction of one day, capped
//   at 1.0 for jobs longer than 24 hours.
// - Jobs with missing "flops_any"/"mem_bw" stats or a non-finite
//   intensity (e.g. mem_bw avg of 0) are skipped.
// Returns null when no plottable points were produced.
function transformJobsStatsToData(subclusterData) {
  /* c will contain values from 0 to 1 representing the duration */
  let data = null
  const x = [], y = [], c = [], day = 86400.0
  if (subclusterData) {
    for (let i = 0; i < subclusterData.length; i++) {
      const flopsData = subclusterData[i].stats.find((s) => s.name == "flops_any")
      const memBwData = subclusterData[i].stats.find((s) => s.name == "mem_bw")
      // Bugfix: skip jobs missing either metric instead of throwing a
      // TypeError on `.data.avg` of undefined.
      if (!flopsData || !memBwData)
        continue
      const f = flopsData.data.avg
      const m = memBwData.data.avg
      const d = subclusterData[i].duration / day
      const intensity = f / m
      if (Number.isNaN(intensity) || !Number.isFinite(intensity))
        continue
      x.push(intensity)
      y.push(f)
      // Long Jobs > 1 Day: Use max Color
      if (d > 1.0) c.push(1.0)
      else c.push(d)
    }
  } else {
    console.warn("transformJobsStatsToData: metrics for 'mem_bw' and/or 'flops_any' missing!")
  }
  if (x.length > 0 && y.length > 0 && c.length > 0) {
    data = [null, [x, y], c] // for dataformat see roofline.svelte
  }
  return data
}
// Map per-node metric averages into the roofline scatter structure
// [null, [intensity[], flops[]]] (see roofline.svelte). Non-finite
// intensities are clamped to 0.0 so they stay below the log-plot render
// limit. Returns null when no points could be produced.
function transformNodesStatsToData(subclusterData) {
  const intensities = []
  const flops = []
  if (subclusterData) {
    for (const node of subclusterData) {
      const flopsEntry = node.metrics.find((s) => s.name == "flops_any")
      const memBwEntry = node.metrics.find((s) => s.name == "mem_bw")
      const f = flopsEntry.metric.series[0].statistics.avg
      const m = memBwEntry.metric.series[0].statistics.avg
      let intensity = f / m
      if (Number.isNaN(intensity) || !Number.isFinite(intensity)) {
        intensity = 0.0 // Set to Float Zero: Will not show in Log-Plot (Always below render limit)
      }
      intensities.push(intensity)
      flops.push(f)
    }
  }
  if (intensities.length > 0 && flops.length > 0) {
    return [null, [intensities, flops]] // for dataformat see roofline.svelte
  }
  return null
}
// Build the tooltip/legend info objects for the jobs roofline; jobs
// without accelerators report numAcc = 0 and durations are pre-formatted
// for display.
function transformJobsStatsToInfo(subclusterData) {
  if (!subclusterData) {
    console.warn("transformJobsStatsToInfo: jobInfo missing!")
    return []
  }
  return subclusterData.map((sc) => ({
    id: sc.id,
    jobId: sc.jobId,
    numNodes: sc.numNodes,
    numAcc: sc?.numAccelerators ? sc.numAccelerators : 0,
    duration: formatTime(sc.duration),
  }))
}
// Build tooltip/legend info for the node roofline: for each node returned
// by the metric store, look up its DB state and count the running jobs
// placed on it.
// - nodeState falls back to the sentinel "notindb" when the host is not
//   present in the `nodes` query result.
// - numJobs counts entries of the running-jobs query whose resources
//   include this hostname.
// NOTE(review): reads the reactive $statusQuery store directly; results
// depend on the query having resolved — verify callers guard on that.
function transformNodesStatsToInfo(subClusterData) {
  let result = [];
  if (subClusterData) { // && $nodesState?.data) {
    // Use Nodes as Returned from CCMS, *NOT* as saved in DB via SlurmState-API!
    for (let j = 0; j < subClusterData.length; j++) {
      const nodeName = subClusterData[j]?.host ? subClusterData[j].host : "unknown"
      // Match on hostname AND subCluster: hostnames may repeat across subclusters.
      const nodeMatch = $statusQuery?.data?.nodes?.items?.find((n) => n.hostname == nodeName && n.subCluster == subClusterData[j].subCluster);
      const nodeState = nodeMatch?.nodeState ? nodeMatch.nodeState : "notindb"
      let numJobs = 0
      if ($statusQuery?.data) {
        const nodeJobs = $statusQuery?.data?.jobs?.items?.filter((job) => job.resources.find((res) => res.hostname == nodeName))
        numJobs = nodeJobs?.length ? nodeJobs.length : 0
      }
      result.push({nodeName: nodeName, nodeState: nodeState, numJobs: numJobs})
    };
  };
  return result
}
// Pick the legend color for `targetIdx` from the active pie palette
// (colorblind takes precedence over alternative, then default); the
// index wraps around when it exceeds the palette length.
function legendColors(targetIdx) {
  let palette;
  if (useCbColors) {
    palette = [...colors['colorblind']];
  } else if (useAltColors) {
    palette = [...colors['alternative']];
  } else {
    palette = [...colors['default']];
  }
  const wrapped = (palette.length + targetIdx) % palette.length;
  return palette[wrapped];
}
</script>
<!-- Refresher and space for other options -->
<Row class="justify-content-end">
  <Col xs="12" md="5" lg="4" xl="3">
    <!-- Shifts the [from, to] window to the last 5 minutes every 120s, retriggering the GQL queries -->
    <Refresher
      initially={120}
      onRefresh={() => {
        from = new Date(Date.now() - 5 * 60 * 1000);
        to = new Date(Date.now());
      }}
    />
  </Col>
</Row>
<hr/>
<!-- Node Health Pies, later Charts -->
{#if $initq.data && $nodesStateCounts.data}
  <Row cols={{ lg: 4, md: 2 , sm: 1}} class="mb-3 justify-content-center">
    <!-- Pie: scheduler node states -->
    <Col class="px-3 mt-2 mt-lg-0">
      <div bind:clientWidth={pieWidth}>
        {#key refinedStateData}
          <h4 class="text-center">
            {cluster.charAt(0).toUpperCase() + cluster.slice(1)} Node States
          </h4>
          <Pie
            {useAltColors}
            canvasId="hpcpie-slurm"
            size={pieWidth * 0.55}
            sliceLabel="Nodes"
            quantities={refinedStateData.map(
              (sd) => sd.count,
            )}
            entities={refinedStateData.map(
              (sd) => sd.state,
            )}
          />
        {/key}
      </div>
    </Col>
    <!-- Legend table for the node-state pie; row colors must match Pie slice order via legendColors(i) -->
    <Col class="px-4 py-2">
      {#key refinedStateData}
        <Table>
          <tr class="mb-2">
            <th></th>
            <th>Current State</th>
            <th>Nodes</th>
          </tr>
          {#each refinedStateData as sd, i}
            <tr>
              <td><Icon name="circle-fill" style="color: {legendColors(i)};"/></td>
              <td>{sd.state}</td>
              <td>{sd.count}</td>
            </tr>
          {/each}
        </Table>
      {/key}
    </Col>
    <!-- Pie: monitoring health states -->
    <Col class="px-3 mt-2 mt-lg-0">
      <div bind:clientWidth={pieWidth}>
        {#key refinedHealthData}
          <h4 class="text-center">
            {cluster.charAt(0).toUpperCase() + cluster.slice(1)} Node Health
          </h4>
          <Pie
            {useAltColors}
            canvasId="hpcpie-health"
            size={pieWidth * 0.55}
            sliceLabel="Nodes"
            quantities={refinedHealthData.map(
              (sd) => sd.count,
            )}
            entities={refinedHealthData.map(
              (sd) => sd.state,
            )}
          />
        {/key}
      </div>
    </Col>
    <!-- Legend table for the health pie -->
    <Col class="px-4 py-2">
      {#key refinedHealthData}
        <Table>
          <tr class="mb-2">
            <th></th>
            <th>Current Health</th>
            <th>Nodes</th>
          </tr>
          {#each refinedHealthData as hd, i}
            <tr>
              <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
              <td>{hd.state}</td>
              <td>{hd.count}</td>
            </tr>
          {/each}
        </Table>
      {/key}
    </Col>
  </Row>
{/if}
<hr/>
<!-- Gauges & Roofline per Subcluster-->
{#if $initq.data && $statusQuery.data}
  {#each $initq.data.clusters.find((c) => c.name == cluster).subClusters as subCluster, i}
    <Row cols={{ lg: 3, md: 1 , sm: 1}} class="mb-3 justify-content-center">
      <!-- Card: per-subcluster totals (jobs, users, rates, allocation) -->
      <Col class="px-3">
        <Card class="h-auto mt-1">
          <CardHeader>
            <CardTitle class="mb-0">SubCluster "{subCluster.name}"</CardTitle>
            <span>{subCluster.processorType}</span>
          </CardHeader>
          <CardBody>
            <Table borderless>
              <tr class="py-2">
                <td style="font-size:x-large;">{runningJobs[subCluster.name]} Running Jobs</td>
                <td colspan="2" style="font-size:x-large;">{activeUsers[subCluster.name]} Active Users</td>
              </tr>
              <hr class="my-1"/>
              <tr class="pt-2">
                <td style="font-size: large;">
                  Flop Rate (<span style="cursor: help;" title="Flops[Any] = (Flops[Double] x 2) + Flops[Single]">Any</span>)
                </td>
                <td colspan="2" style="font-size: large;">
                  Memory BW Rate
                </td>
              </tr>
              <tr class="pb-2">
                <td style="font-size:x-large;">
                  {flopRate[subCluster.name]}
                  {flopRateUnitPrefix[subCluster.name]}{flopRateUnitBase[subCluster.name]}
                </td>
                <td colspan="2" style="font-size:x-large;">
                  {memBwRate[subCluster.name]}
                  {memBwRateUnitPrefix[subCluster.name]}{memBwRateUnitBase[subCluster.name]}
                </td>
              </tr>
              <hr class="my-1"/>
              <tr class="py-2">
                <th scope="col">Allocated Nodes</th>
                <td style="min-width: 100px;"
                  ><div class="col">
                    <Progress
                      value={allocatedNodes[subCluster.name]}
                      max={subCluster.numberOfNodes}
                    />
                  </div></td
                >
                <td
                  >{allocatedNodes[subCluster.name]} / {subCluster.numberOfNodes}
                  Nodes</td
                >
              </tr>
              <!-- NOTE(review): guards only against null; if totalAccs[name] is undefined this row still renders — confirm intended -->
              {#if totalAccs[subCluster.name] !== null}
                <tr class="py-2">
                  <th scope="col">Allocated Accelerators</th>
                  <td style="min-width: 100px;"
                    ><div class="col">
                      <Progress
                        value={allocatedAccs[subCluster.name]}
                        max={totalAccs[subCluster.name]}
                      />
                    </div></td
                  >
                  <td
                    >{allocatedAccs[subCluster.name]} / {totalAccs[subCluster.name]}
                    Accelerators</td
                  >
                </tr>
              {/if}
            </Table>
          </CardBody>
        </Card>
      </Col>
      <!-- Roofline: current node utilization for this subcluster -->
      <Col class="px-3 mt-2 mt-lg-0">
        <div bind:clientWidth={plotWidths[i]}>
          {#key $statusQuery?.data?.nodeMetrics}
            <Roofline
              useColors={true}
              allowSizeChange
              width={plotWidths[i] - 10}
              height={300}
              cluster={cluster}
              subCluster={subCluster}
              roofData={transformNodesStatsToData($statusQuery?.data?.nodeMetrics.filter(
                (data) => data.subCluster == subCluster.name,
              )
              )}
              nodesData={transformNodesStatsToInfo($statusQuery?.data?.nodeMetrics.filter(
                (data) => data.subCluster == subCluster.name,
              )
              )}
            />
          {/key}
        </div>
      </Col>
      <!-- Roofline: running jobs for this subcluster -->
      <!-- NOTE(review): binds clientWidth to the same plotWidths[i] slot as the column above, and (unlike above) passes no `cluster` prop — confirm both are intended -->
      <Col class="px-3 mt-2 mt-lg-0">
        <div bind:clientWidth={plotWidths[i]}>
          {#key $statusQuery?.data?.jobsMetricStats}
            <Roofline
              useColors={true}
              allowSizeChange
              width={plotWidths[i] - 10}
              height={300}
              subCluster={subCluster}
              roofData={transformJobsStatsToData($statusQuery?.data?.jobsMetricStats.filter(
                (data) => data.subCluster == subCluster.name,
              )
              )}
              jobsData={transformJobsStatsToInfo($statusQuery?.data?.jobsMetricStats.filter(
                (data) => data.subCluster == subCluster.name,
              )
              )}
            />
          {/key}
        </div>
      </Col>
    </Row>
  {/each}
{:else}
  <Card class="mx-4" body color="warning">Cannot render status rooflines: No data!</Card>
{/if}

View File

@@ -0,0 +1,547 @@
<!--
@component Main cluster status view component; renders current system-usage information
Properties:
- `presetCluster String`: The cluster to show status information for
-->
<script>
import {
Row,
Col,
Spinner,
Card,
Table,
Icon,
Tooltip,
Input,
InputGroup,
InputGroupText
} from "@sveltestrap/sveltestrap";
import {
queryStore,
gql,
getContextClient,
} from "@urql/svelte";
import {
init,
scramble,
scrambleNames,
convert2uplot,
} from "../generic/utils.js";
import Pie, { colors } from "../generic/plots/Pie.svelte";
import Histogram from "../generic/plots/Histogram.svelte";
import Refresher from "../generic/helper/Refresher.svelte";
/* Svelte 5 Props */
let {
  presetCluster,           // String: cluster to show status information for
  useCbColors = false,     // Bool: use the colorblind-safe legend palette
  useAltColors = false     // Bool: use the alternative legend palette
} = $props();
/* Const Init */
const { query: initq } = init();
const client = getContextClient();
const durationBinOptions = ["1m","10m","1h","6h","12h"]; // selectable duration histogram bin sizes
/* State Init */
let cluster = $state(presetCluster)
let from = $state(new Date(Date.now() - (30 * 24 * 60 * 60 * 1000))); // Simple way to retrigger GQL: Jobs Started last Month
let to = $state(new Date(Date.now()));
let colWidthJobs = $state(0);   // bound clientWidth of the jobs pie column
let colWidthNodes = $state(0);  // bound clientWidth of the nodes pie column
let colWidthAccs = $state(0);   // bound clientWidth of the accelerators pie column
let numDurationBins = $state("1h"); // currently selected duration bin size
/* Derived */
// Top-10 users and projects by number of currently running jobs on this
// cluster; re-executes whenever cluster/from/to change.
const topJobsQuery = $derived(queryStore({
  client: client,
  query: gql`
    query (
      $filter: [JobFilter!]!
      $paging: PageRequest!
    ) {
      topUser: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALJOBS
        groupBy: USER
      ) {
        id
        name
        totalJobs
      }
      topProjects: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALJOBS
        groupBy: PROJECT
      ) {
        id
        totalJobs
      }
    }
  `,
  variables: {
    filter: [{ state: ["running"] }, { cluster: { eq: cluster}}, {startTime: { from, to }}],
    paging: { itemsPerPage: 10, page: 1 } // Top 10
  },
}));
// Top-10 users and projects by number of allocated nodes (running jobs only).
const topNodesQuery = $derived(queryStore({
  client: client,
  query: gql`
    query (
      $filter: [JobFilter!]!
      $paging: PageRequest!
    ) {
      topUser: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALNODES
        groupBy: USER
      ) {
        id
        name
        totalNodes
      }
      topProjects: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALNODES
        groupBy: PROJECT
      ) {
        id
        totalNodes
      }
    }
  `,
  variables: {
    filter: [{ state: ["running"] }, { cluster: { eq: cluster }}, {startTime: { from, to }}],
    paging: { itemsPerPage: 10, page: 1 } // Top 10
  },
}));
// Top-10 users and projects by number of allocated accelerators (running jobs only).
const topAccsQuery = $derived(queryStore({
  client: client,
  query: gql`
    query (
      $filter: [JobFilter!]!
      $paging: PageRequest!
    ) {
      topUser: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALACCS
        groupBy: USER
      ) {
        id
        name
        totalAccs
      }
      topProjects: jobsStatistics(
        filter: $filter
        page: $paging
        sortBy: TOTALACCS
        groupBy: PROJECT
      ) {
        id
        totalAccs
      }
    }
  `,
  variables: {
    filter: [{ state: ["running"] }, { cluster: { eq: cluster }}, {startTime: { from, to }}],
    paging: { itemsPerPage: 10, page: 1 } // Top 10
  },
}));
// Histograms over currently running jobs: duration, node count, and
// accelerator count distributions, with duration bins sized by numDurationBins.
// NOTE(review): despite its name, this query fetches jobsStatistics histograms,
// not node metrics; the previous comment about nodeMetrics/$timestep looks
// stale — confirm.
const nodeStatusQuery = $derived(queryStore({
  client: client,
  query: gql`
    query (
      $filter: [JobFilter!]!
      $selectedHistograms: [String!]
      $numDurationBins: String
    ) {
      jobsStatistics(filter: $filter, metrics: $selectedHistograms, numDurationBins: $numDurationBins) {
        histDuration {
          count
          value
        }
        histNumNodes {
          count
          value
        }
        histNumAccs {
          count
          value
        }
      }
    }
  `,
  variables: {
    filter: [{ state: ["running"] }, { cluster: { eq: cluster }}, {startTime: { from, to }}],
    selectedHistograms: [], // No Metrics requested for node hardware stats
    numDurationBins: numDurationBins,
  },
}));
/* Functions */
function legendColors(targetIdx) {
  // Returns the legend color for row `targetIdx`; indices beyond the palette
  // length wrap around, so the first colors get reused.
  const palette = useCbColors
    ? [...colors['colorblind']]
    : useAltColors
      ? [...colors['alternative']]
      : [...colors['default']];
  return palette[(palette.length + targetIdx) % palette.length];
}
</script>
<!-- Refresher and space for other options -->
<Row class="justify-content-between">
  <!-- Selector: histogram bin size for the duration distribution -->
  <Col class="mb-2 mb-md-0" xs="12" md="5" lg="4" xl="3">
    <InputGroup>
      <InputGroupText>
        <Icon name="bar-chart-line-fill" />
      </InputGroupText>
      <InputGroupText>
        Duration Bin Size
      </InputGroupText>
      <Input type="select" bind:value={numDurationBins}>
        {#each durationBinOptions as dbin}
          <option value={dbin}>{dbin}</option>
        {/each}
      </Input>
    </InputGroup>
  </Col>
  <!-- Periodic re-fetch: resets the [from, to] window every 120s -->
  <Col xs="12" md="5" lg="4" xl="3">
    <Refresher
      initially={120}
      onRefresh={() => {
        from = new Date(Date.now() - (30 * 24 * 60 * 60 * 1000)); // Triggers GQL
        to = new Date(Date.now());
      }}
    />
  </Col>
</Row>
<hr/>
<!-- Job Duration, Top Users and Projects-->
{#if $topJobsQuery.fetching || $nodeStatusQuery.fetching}
  <Spinner />
{:else if $topJobsQuery.data && $nodeStatusQuery.data}
  <Row>
    <!-- Histogram: runtime distribution of currently running jobs -->
    <!-- NOTE(review): indexes jobsStatistics[0] unguarded; throws if the list is empty — confirm backend always returns one entry -->
    <Col xs="12" lg="4" class="p-2">
      {#key $nodeStatusQuery.data.jobsStatistics[0].histDuration}
        <Histogram
          data={convert2uplot($nodeStatusQuery.data.jobsStatistics[0].histDuration)}
          title="Duration Distribution"
          xlabel="Current Job Runtimes"
          xunit="Runtime"
          ylabel="Number of Jobs"
          yunit="Jobs"
          height="275"
          usesBins
          xtime
        />
      {/key}
    </Col>
    <!-- Pie: top users by running job count; div bind sets colWidthJobs for both pies in this row -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <div bind:clientWidth={colWidthJobs}>
        <h4 class="text-center">
          Top Users: Jobs
        </h4>
        <Pie
          {useAltColors}
          canvasId="hpcpie-jobs-users"
          size={colWidthJobs * 0.75}
          sliceLabel="Jobs"
          quantities={$topJobsQuery.data.topUser.map(
            (tu) => tu['totalJobs'],
          )}
          entities={$topJobsQuery.data.topUser.map((tu) => scrambleNames ? scramble(tu.id) : tu.id)}
        />
      </div>
    </Col>
    <!-- Legend table; row colors follow Pie slice order via legendColors(i) -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">User</th>
          <th>Jobs</th>
        </tr>
        {#each $topJobsQuery.data.topUser as tu, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td id="topName-jobs-{tu.id}">
              <a target="_blank" href="/monitoring/user/{tu.id}?cluster={cluster}&state=running"
                >{scrambleNames ? scramble(tu.id) : tu.id}
              </a>
            </td>
            {#if tu?.name}
              <Tooltip
                target={`topName-jobs-${tu.id}`}
                placement="left"
              >{scrambleNames ? scramble(tu.name) : tu.name}</Tooltip
              >
            {/if}
            <td>{tu['totalJobs']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
    <!-- Pie: top projects by running job count -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <h4 class="text-center">
        Top Projects: Jobs
      </h4>
      <Pie
        {useAltColors}
        canvasId="hpcpie-jobs-projects"
        size={colWidthJobs * 0.75}
        sliceLabel={'Jobs'}
        quantities={$topJobsQuery.data.topProjects.map(
          (tp) => tp['totalJobs'],
        )}
        entities={$topJobsQuery.data.topProjects.map((tp) => scrambleNames ? scramble(tp.id) : tp.id)}
      />
    </Col>
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">Project</th>
          <th>Jobs</th>
        </tr>
        {#each $topJobsQuery.data.topProjects as tp, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td>
              <a target="_blank" href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq"
                >{scrambleNames ? scramble(tp.id) : tp.id}
              </a>
            </td>
            <td>{tp['totalJobs']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
  </Row>
{:else}
  <Card class="mx-4" body color="warning">Cannot render job status charts: No data!</Card>
{/if}
<hr/>
<!-- Node Distribution, Top Users and Projects-->
{#if $topNodesQuery.fetching || $nodeStatusQuery.fetching}
  <Spinner />
{:else if $topNodesQuery.data && $nodeStatusQuery.data}
  <Row>
    <!-- Histogram: node count distribution of currently running jobs -->
    <Col xs="12" lg="4" class="p-2">
      <Histogram
        data={convert2uplot($nodeStatusQuery.data.jobsStatistics[0].histNumNodes)}
        title="Number of Nodes Distribution"
        xlabel="Allocated Nodes"
        xunit="Nodes"
        ylabel="Number of Jobs"
        yunit="Jobs"
        height="275"
      />
    </Col>
    <!-- Pie: top users by allocated nodes; div bind sets colWidthNodes for both pies in this row -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <div bind:clientWidth={colWidthNodes}>
        <h4 class="text-center">
          Top Users: Nodes
        </h4>
        <Pie
          {useAltColors}
          canvasId="hpcpie-nodes-users"
          size={colWidthNodes * 0.75}
          sliceLabel="Nodes"
          quantities={$topNodesQuery.data.topUser.map(
            (tu) => tu['totalNodes'],
          )}
          entities={$topNodesQuery.data.topUser.map((tu) => scrambleNames ? scramble(tu.id) : tu.id)}
        />
      </div>
    </Col>
    <!-- Legend table; row colors follow Pie slice order via legendColors(i) -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">User</th>
          <th>Nodes</th>
        </tr>
        {#each $topNodesQuery.data.topUser as tu, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td id="topName-nodes-{tu.id}">
              <a target="_blank" href="/monitoring/user/{tu.id}?cluster={cluster}&state=running"
                >{scrambleNames ? scramble(tu.id) : tu.id}
              </a>
            </td>
            {#if tu?.name}
              <Tooltip
                target={`topName-nodes-${tu.id}`}
                placement="left"
              >{scrambleNames ? scramble(tu.name) : tu.name}</Tooltip
              >
            {/if}
            <td>{tu['totalNodes']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
    <!-- Pie: top projects by allocated nodes -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <h4 class="text-center">
        Top Projects: Nodes
      </h4>
      <Pie
        {useAltColors}
        canvasId="hpcpie-nodes-projects"
        size={colWidthNodes * 0.75}
        sliceLabel={'Nodes'}
        quantities={$topNodesQuery.data.topProjects.map(
          (tp) => tp['totalNodes'],
        )}
        entities={$topNodesQuery.data.topProjects.map((tp) => scrambleNames ? scramble(tp.id) : tp.id)}
      />
    </Col>
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">Project</th>
          <th>Nodes</th>
        </tr>
        {#each $topNodesQuery.data.topProjects as tp, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td>
              <a target="_blank" href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq"
                >{scrambleNames ? scramble(tp.id) : tp.id}
              </a>
            </td>
            <td>{tp['totalNodes']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
  </Row>
{:else}
  <Card class="mx-4" body color="warning">Cannot render node status charts: No data!</Card>
{/if}
<hr/>
<!-- Acc Distribution, Top Users and Projects-->
{#if $topAccsQuery.fetching || $nodeStatusQuery.fetching}
  <Spinner />
{:else if $topAccsQuery.data && $nodeStatusQuery.data}
  <Row>
    <!-- Histogram: accelerator count distribution of currently running jobs -->
    <Col xs="12" lg="4" class="p-2">
      <Histogram
        data={convert2uplot($nodeStatusQuery.data.jobsStatistics[0].histNumAccs)}
        title="Number of Accelerators Distribution"
        xlabel="Allocated Accs"
        xunit="Accs"
        ylabel="Number of Jobs"
        yunit="Jobs"
        height="275"
      />
    </Col>
    <!-- Pie: top users by allocated accelerators; div bind sets colWidthAccs for both pies in this row -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <div bind:clientWidth={colWidthAccs}>
        <h4 class="text-center">
          Top Users: GPUs
        </h4>
        <Pie
          {useAltColors}
          canvasId="hpcpie-accs-users"
          size={colWidthAccs * 0.75}
          sliceLabel="GPUs"
          quantities={$topAccsQuery.data.topUser.map(
            (tu) => tu['totalAccs'],
          )}
          entities={$topAccsQuery.data.topUser.map((tu) => scrambleNames ? scramble(tu.id) : tu.id)}
        />
      </div>
    </Col>
    <!-- Legend table; row colors follow Pie slice order via legendColors(i) -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">User</th>
          <th>GPUs</th>
        </tr>
        {#each $topAccsQuery.data.topUser as tu, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td id="topName-accs-{tu.id}">
              <a target="_blank" href="/monitoring/user/{tu.id}?cluster={cluster}&state=running"
                >{scrambleNames ? scramble(tu.id) : tu.id}
              </a>
            </td>
            {#if tu?.name}
              <Tooltip
                target={`topName-accs-${tu.id}`}
                placement="left"
              >{scrambleNames ? scramble(tu.name) : tu.name}</Tooltip
              >
            {/if}
            <td>{tu['totalAccs']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
    <!-- Pie: top projects by allocated accelerators -->
    <Col xs="6" md="3" lg="2" class="p-2">
      <h4 class="text-center">
        Top Projects: GPUs
      </h4>
      <Pie
        {useAltColors}
        canvasId="hpcpie-accs-projects"
        size={colWidthAccs * 0.75}
        sliceLabel={'GPUs'}
        quantities={$topAccsQuery.data.topProjects.map(
          (tp) => tp['totalAccs'],
        )}
        entities={$topAccsQuery.data.topProjects.map((tp) => scrambleNames ? scramble(tp.id) : tp.id)}
      />
    </Col>
    <Col xs="6" md="3" lg="2" class="p-2">
      <Table>
        <tr class="mb-2">
          <th></th>
          <th style="padding-left: 0.5rem;">Project</th>
          <th>GPUs</th>
        </tr>
        {#each $topAccsQuery.data.topProjects as tp, i}
          <tr>
            <td><Icon name="circle-fill" style="color: {legendColors(i)};" /></td>
            <td>
              <a target="_blank" href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq"
                >{scrambleNames ? scramble(tp.id) : tp.id}
              </a>
            </td>
            <td>{tp['totalAccs']}</td>
          </tr>
        {/each}
      </Table>
    </Col>
  </Row>
{:else}
  <Card class="mx-4" body color="warning">Cannot render accelerator status charts: No data!</Card>
{/if}