Mirror of https://github.com/ClusterCockpit/cc-backend
synced 2025-08-02 17:30:36 +02:00
Compare commits
32 Commits
135-batch-
...
v1.2.2
280b16c11c
4b922c575e
09528ed6b9
e61ff01518
a4c68bf7fe
bb1c8cc25d
4b06fa788d
ab08600486
7a5ccff6da
a407a5cf01
2b3e2f25ec
ed5ecbd914
2d4759114e
c68b9fec42
0f34c8cac6
d388a45630
84b63af080
20902f842d
df7217f79c
bd6f38b4f3
827a85412e
c4a9fcc1ca
0993549cac
1b8c4e293c
b449b77b95
f235b1a99c
b2b4beaeaa
8d7f942de4
c1b5134627
f5c43d60d3
69ee19bed0
4d7819802d
@@ -101,5 +101,6 @@ release:
 draft: false
 footer: |
 Supports job archive version 1 and database version 6.
+Please check out the [Release Notes](https://github.com/ClusterCockpit/cc-backend/blob/master/ReleaseNotes.md) for further details on breaking changes.
 
 # vim: set ts=2 sw=2 tw=0 fo=cnqoj
Makefile (2 changed lines)
@@ -2,7 +2,7 @@ TARGET = ./cc-backend
 VAR = ./var
 CFG = config.json .env
 FRONTEND = ./web/frontend
-VERSION = 1.2.0
+VERSION = 1.2.2
 GIT_HASH := $(shell git rev-parse --short HEAD || echo 'development')
 CURRENT_TIME = $(shell date +"%Y-%m-%d:T%H:%M:%S")
 LD_FLAGS = '-s -X main.date=${CURRENT_TIME} -X main.version=${VERSION} -X main.commit=${GIT_HASH}'
@@ -1,3 +1,6 @@
+# NOTE
+Please have a look at the [Release Notes](https://github.com/ClusterCockpit/cc-backend/blob/master/ReleaseNotes.md) for breaking changes!
+
 # ClusterCockpit REST and GraphQL API backend
 
 [](https://github.com/ClusterCockpit/cc-backend/actions/workflows/test.yml)
@@ -1,4 +1,4 @@
-# `cc-backend` version 1.2.0
+# `cc-backend` version 1.2.2
 
 Supports job archive version 1 and database version 6.
 
@@ -7,7 +7,7 @@ implementation of ClusterCockpit.
 
 ** Breaking changes **
 
-* The LDAP configuration option user_filter was changed and now should not include
+* The LDAP configuration option `user_filter` was changed and now should not include
  the uid wildcard. Example:
  - Old: `"user_filter": "(&(objectclass=posixAccount)(uid=*))"`
  - New: `"user_filter": "(&(objectclass=posixAccount))"`
@@ -25,6 +25,10 @@ is not the number of cores the core hours will be too high by a factor!
 validity. Some key names have changed, please refer to
 [config documentation](./configs/README.md) for details.
 
+* The following API endpoints are only accessible from IPs registered using the apiAllowedIPs configuration option:
+  - `/users/` [GET, POST, DELETE]
+  - `/user/{id}` [POST]
+
 ** NOTE **
 If you are using the sqlite3 backend the `PRAGMA` option `foreign_keys` must be
 explicitly set to ON. If using the sqlite3 console it is per default set to
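As a hedged illustration of the sqlite3 note above (this is not code from this repository, and the database path is only a placeholder), foreign-key enforcement can be switched on explicitly right after opening the connection from Go:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed driver; any driver registered as "sqlite3" works the same way
)

func main() {
	// "./var/job.db" is a placeholder path, not necessarily the cc-backend default.
	db, err := sql.Open("sqlite3", "./var/job.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// foreign_keys is a per-connection setting, so force a single pooled
	// connection before enabling it explicitly.
	db.SetMaxOpenConns(1)
	if _, err := db.Exec("PRAGMA foreign_keys = ON"); err != nil {
		log.Fatal(err)
	}
}
```

From the sqlite3 console the equivalent is `PRAGMA foreign_keys = ON;`, issued once per session.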
@@ -29,7 +29,7 @@
 "application/json"
 ],
 "tags": [
-"query"
+"Job query"
 ],
 "summary": "Lists all jobs",
 "parameters": [

@@ -127,7 +127,7 @@
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -199,7 +199,7 @@
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -269,7 +269,7 @@
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -342,7 +342,7 @@
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Adds a new job as \"running\"",
 "parameters": [

@@ -408,7 +408,7 @@
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Marks job as completed and triggers archiving",
 "parameters": [

@@ -483,7 +483,7 @@
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Marks job as completed and triggers archiving",
 "parameters": [

@@ -565,7 +565,7 @@
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Adds one or more tags to a job",
 "parameters": [

@@ -638,7 +638,7 @@
 "application/json"
 ],
 "tags": [
-"query"
+"Job query"
 ],
 "summary": "Get complete job meta and metric data",
 "parameters": [
@@ -715,7 +715,7 @@
 "ApiKeyAuth": []
 }
 ],
-"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.",
+"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -723,7 +723,7 @@
 "text/plain"
 ],
 "tags": [
-"add and modify"
+"User"
 ],
 "summary": "Updates an existing user",
 "parameters": [

@@ -820,12 +820,12 @@
 "ApiKeyAuth": []
 }
 ],
-"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.",
+"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "produces": [
 "application/json"
 ],
 "tags": [
-"query"
+"User"
 ],
 "summary": "Returns a list of users",
 "parameters": [

@@ -879,7 +879,7 @@
 "ApiKeyAuth": []
 }
 ],
-"description": "User specified in form data will be saved to database.",
+"description": "User specified in form data will be saved to database.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -887,7 +887,7 @@
 "text/plain"
 ],
 "tags": [
-"add and modify"
+"User"
 ],
 "summary": "Adds a new user",
 "parameters": [

@@ -983,7 +983,7 @@
 "ApiKeyAuth": []
 }
 ],
-"description": "User defined by username in form data will be deleted from database.",
+"description": "User defined by username in form data will be deleted from database.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -991,7 +991,7 @@
 "text/plain"
 ],
 "tags": [
-"remove"
+"User"
 ],
 "summary": "Deletes a user",
 "parameters": [
@@ -1757,10 +1757,5 @@
 "name": "X-Auth-Token",
 "in": "header"
 }
-},
-"tags": [
-{
-"name": "Job API"
 }
-]
 }
@@ -607,7 +607,7 @@ paths:
 - ApiKeyAuth: []
 summary: Lists all jobs
 tags:
-- query
+- Job query
 /jobs/{id}:
 post:
 consumes:

@@ -665,7 +665,7 @@ paths:
 - ApiKeyAuth: []
 summary: Get complete job meta and metric data
 tags:
-- query
+- Job query
 /jobs/delete_job/:
 delete:
 consumes:

@@ -715,7 +715,7 @@ paths:
 - ApiKeyAuth: []
 summary: Remove a job from the sql database
 tags:
-- remove
+- Job remove
 /jobs/delete_job/{id}:
 delete:
 description: Job to remove is specified by database ID. This will not remove

@@ -762,7 +762,7 @@ paths:
 - ApiKeyAuth: []
 summary: Remove a job from the sql database
 tags:
-- remove
+- Job remove
 /jobs/delete_job_before/{ts}:
 delete:
 description: Remove all jobs with start time before timestamp. The jobs will

@@ -809,7 +809,7 @@ paths:
 - ApiKeyAuth: []
 summary: Remove a job from the sql database
 tags:
-- remove
+- Job remove
 /jobs/start_job/:
 post:
 consumes:

@@ -856,7 +856,7 @@ paths:
 - ApiKeyAuth: []
 summary: Adds a new job as "running"
 tags:
-- add and modify
+- Job add and modify
 /jobs/stop_job/:
 post:
 description: |-

@@ -905,7 +905,7 @@ paths:
 - ApiKeyAuth: []
 summary: Marks job as completed and triggers archiving
 tags:
-- add and modify
+- Job add and modify
 /jobs/stop_job/{id}:
 post:
 consumes:

@@ -961,7 +961,7 @@ paths:
 - ApiKeyAuth: []
 summary: Marks job as completed and triggers archiving
 tags:
-- add and modify
+- Job add and modify
 /jobs/tag_job/{id}:
 post:
 consumes:

@@ -1010,7 +1010,7 @@ paths:
 - ApiKeyAuth: []
 summary: Adds one or more tags to a job
 tags:
-- add and modify
+- Job add and modify
 /user/{id}:
 post:
 consumes:
@@ -1018,6 +1018,7 @@ paths:
 description: |-
   Modifies user defined by username (id) in one of four possible ways.
   If more than one formValue is set then only the highest priority field is used.
+  Only accessible from IPs registered with apiAllowedIPs configuration option.
 parameters:
 - description: Database ID of User
 in: path

@@ -1083,12 +1084,14 @@ paths:
 - ApiKeyAuth: []
 summary: Updates an existing user
 tags:
-- add and modify
+- User
 /users/:
 delete:
 consumes:
 - multipart/form-data
-description: User defined by username in form data will be deleted from database.
+description: |-
+  User defined by username in form data will be deleted from database.
+  Only accessible from IPs registered with apiAllowedIPs configuration option.
 parameters:
 - description: User ID to delete
 in: formData

@@ -1124,11 +1127,12 @@ paths:
 - ApiKeyAuth: []
 summary: Deletes a user
 tags:
-- remove
+- User
 get:
 description: |-
   Returns a JSON-encoded list of users.
   Required query-parameter defines if all users or only users with additional special roles are returned.
+  Only accessible from IPs registered with apiAllowedIPs configuration option.
 parameters:
 - description: If returned list should contain all users or only users with
   additional special roles

@@ -1165,11 +1169,13 @@ paths:
 - ApiKeyAuth: []
 summary: Returns a list of users
 tags:
-- query
+- User
 post:
 consumes:
 - multipart/form-data
-description: User specified in form data will be saved to database.
+description: |-
+  User specified in form data will be saved to database.
+  Only accessible from IPs registered with apiAllowedIPs configuration option.
 parameters:
 - description: Unique user ID
 in: formData

@@ -1235,12 +1241,10 @@ paths:
 - ApiKeyAuth: []
 summary: Adds a new user
 tags:
-- add and modify
+- User
 securityDefinitions:
 ApiKeyAuth:
 in: header
 name: X-Auth-Token
 type: apiKey
 swagger: "2.0"
-tags:
-- name: Job API
@@ -9,6 +9,7 @@ It is supported to set these by means of a `.env` file in the project root.
 ## Configuration Options
 
 * `addr`: Type string. Address where the http (or https) server will listen on (for example: 'localhost:80'). Default `:8080`.
+* `apiAllowedIPs`: Type string array. Addresses from which the secured API endpoints (/users and other auth related endpoints) can be reached
 * `user`: Type string. Drop root permissions once .env was read and the port was taken. Only applicable if using privileged port.
 * `group`: Type string. Drop root permissions once .env was read and the port was taken. Only applicable if using privileged port.
 * `disable-authentication`: Type bool. Disable authentication (for everything: API, Web-UI, ...). Default `false`.
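To make the new `apiAllowedIPs` option concrete, here is a small hedged sketch of how it maps onto the Go configuration struct. The field names and JSON tags are taken from the `ProgramConfig` hunk further down; the values themselves are only examples:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Minimal subset of the server configuration, matching the JSON tags shown
// in the ProgramConfig hunk below.
type ProgramConfig struct {
	Addr          string   `json:"addr"`
	ApiAllowedIPs []string `json:"apiAllowedIPs"`
}

func main() {
	// Example values only, not a recommended production setting.
	raw := []byte(`{"addr": ":8080", "apiAllowedIPs": ["127.0.0.1", "10.0.0.5"]}`)

	var cfg ProgramConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.ApiAllowedIPs) // [127.0.0.1 10.0.0.5]
}
```

Requests to `/users/` and `/user/{id}` from addresses not in this list are rejected by `securedCheck` (see the rest.go hunk below).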
@@ -5,7 +5,7 @@
 "path": "./var/job-archive"
 },
 "jwts": {
-"max-age": "2m"
+"max-age": "2000h"
 },
 "clusters": [
 {
@@ -43,7 +43,7 @@
 "jwts": {
 "cookieName": "",
 "validateUser": false,
-"max-age": "2m",
+"max-age": "2000h",
 "trustedIssuer": ""
 },
 "short-running-jobs-duration": 300
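Assuming the `max-age` string is parsed as a Go duration (an assumption based on the value format, not verified against the source), the change from `2m` to `2000h` raises the configured JWT lifetime from two minutes to roughly 83 days:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	oldAge, _ := time.ParseDuration("2m")    // previous example value: 2 minutes
	newAge, _ := time.ParseDuration("2000h") // new example value: 2000 hours
	fmt.Println(oldAge, newAge, newAge.Hours()/24) // 2m0s 2000h0m0s ~83.3 days
}
```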
@@ -1,4 +1,5 @@
-// Code generated by swaggo/swag. DO NOT EDIT
+// Code generated by swaggo/swag. DO NOT EDIT.
+
 package api
 
 import "github.com/swaggo/swag"

@@ -35,7 +36,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"query"
+"Job query"
 ],
 "summary": "Lists all jobs",
 "parameters": [

@@ -133,7 +134,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -205,7 +206,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -275,7 +276,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"remove"
+"Job remove"
 ],
 "summary": "Remove a job from the sql database",
 "parameters": [

@@ -348,7 +349,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Adds a new job as \"running\"",
 "parameters": [

@@ -414,7 +415,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Marks job as completed and triggers archiving",
 "parameters": [

@@ -489,7 +490,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Marks job as completed and triggers archiving",
 "parameters": [

@@ -571,7 +572,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"add and modify"
+"Job add and modify"
 ],
 "summary": "Adds one or more tags to a job",
 "parameters": [

@@ -644,7 +645,7 @@ const docTemplate = `{
 "application/json"
 ],
 "tags": [
-"query"
+"Job query"
 ],
 "summary": "Get complete job meta and metric data",
 "parameters": [
@@ -721,7 +722,7 @@ const docTemplate = `{
 "ApiKeyAuth": []
 }
 ],
-"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.",
+"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -729,7 +730,7 @@ const docTemplate = `{
 "text/plain"
 ],
 "tags": [
-"add and modify"
+"User"
 ],
 "summary": "Updates an existing user",
 "parameters": [

@@ -826,12 +827,12 @@ const docTemplate = `{
 "ApiKeyAuth": []
 }
 ],
-"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.",
+"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "produces": [
 "application/json"
 ],
 "tags": [
-"query"
+"User"
 ],
 "summary": "Returns a list of users",
 "parameters": [

@@ -885,7 +886,7 @@ const docTemplate = `{
 "ApiKeyAuth": []
 }
 ],
-"description": "User specified in form data will be saved to database.",
+"description": "User specified in form data will be saved to database.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -893,7 +894,7 @@ const docTemplate = `{
 "text/plain"
 ],
 "tags": [
-"add and modify"
+"User"
 ],
 "summary": "Adds a new user",
 "parameters": [

@@ -989,7 +990,7 @@ const docTemplate = `{
 "ApiKeyAuth": []
 }
 ],
-"description": "User defined by username in form data will be deleted from database.",
+"description": "User defined by username in form data will be deleted from database.\nOnly accessible from IPs registered with apiAllowedIPs configuration option.",
 "consumes": [
 "multipart/form-data"
 ],

@@ -997,7 +998,7 @@ const docTemplate = `{
 "text/plain"
 ],
 "tags": [
-"remove"
+"User"
 ],
 "summary": "Deletes a user",
 "parameters": [
@@ -1763,12 +1764,7 @@ const docTemplate = `{
 "name": "X-Auth-Token",
 "in": "header"
 }
-},
-"tags": [
-{
-"name": "Job API"
 }
-]
 }`
 
 // SwaggerInfo holds exported Swagger Info so clients can modify it
@@ -1781,6 +1777,8 @@ var SwaggerInfo = &swag.Spec{
 	Description:      "API for batch job control.",
 	InfoInstanceName: "swagger",
 	SwaggerTemplate:  docTemplate,
+	LeftDelim:        "{{",
+	RightDelim:       "}}",
 }
 
 func init() {
@@ -37,8 +37,6 @@ import (
 // @version 1.0.0
 // @description API for batch job control.
 
-// @tag.name Job API
-
 // @contact.name ClusterCockpit Project
 // @contact.url https://github.com/ClusterCockpit
 // @contact.email support@clustercockpit.org
@@ -212,6 +210,10 @@ func securedCheck(r *http.Request) error {
 		IPAddress = r.RemoteAddr
 	}
 
+	if strings.Contains(IPAddress, ":") {
+		IPAddress = strings.Split(IPAddress, ":")[0]
+	}
+
 	// check if IP is allowed
 	if !util.Contains(config.Keys.ApiAllowedIPs, IPAddress) {
 		return fmt.Errorf("unknown ip: %v", IPAddress)
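The hunk above strips a port suffix from the client address before the allow-list check. Below is a minimal, self-contained sketch of that logic; the addresses are placeholders and `util.Contains` is replaced by a plain loop so the snippet stands alone:

```go
package main

import (
	"fmt"
	"strings"
)

// allowed mirrors config.Keys.ApiAllowedIPs; the values are placeholders.
var allowed = []string{"127.0.0.1", "10.0.0.5"}

func ipAllowed(remoteAddr string) bool {
	ip := remoteAddr
	// r.RemoteAddr usually looks like "host:port"; keep only the host part.
	if strings.Contains(ip, ":") {
		ip = strings.Split(ip, ":")[0]
	}
	for _, a := range allowed {
		if a == ip {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(ipAllowed("127.0.0.1:54321")) // true
	fmt.Println(ipAllowed("192.168.1.9:80"))  // false
}
```

Note that splitting on the first ':' would also truncate a bare IPv6 address; `net.SplitHostPort` would be the more robust choice if that case ever matters.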
@@ -223,7 +225,7 @@ func securedCheck(r *http.Request) error {
 
 // getJobs godoc
 // @summary Lists all jobs
-// @tags query
+// @tags Job query
 // @description Get a list of all jobs. Filters can be applied using query parameters.
 // @description Number of results can be limited by page. Results are sorted by descending startTime.
 // @produce json

@@ -369,7 +371,7 @@ func (api *RestApi) getJobs(rw http.ResponseWriter, r *http.Request) {
 
 // getJobById godoc
 // @summary Get complete job meta and metric data
-// @tags query
+// @tags Job query
 // @description Job to get is specified by database ID
 // @description Returns full job resource information according to 'JobMeta' scheme and all metrics according to 'JobData'.
 // @accept json

@@ -464,7 +466,7 @@ func (api *RestApi) getJobById(rw http.ResponseWriter, r *http.Request) {
 
 // tagJob godoc
 // @summary Adds one or more tags to a job
-// @tags add and modify
+// @tags Job add and modify
 // @description Adds tag(s) to a job specified by DB ID. Name and Type of Tag(s) can be chosen freely.
 // @description If tagged job is already finished: Tag will be written directly to respective archive files.
 // @accept json

@@ -531,7 +533,7 @@ func (api *RestApi) tagJob(rw http.ResponseWriter, r *http.Request) {
 
 // startJob godoc
 // @summary Adds a new job as "running"
-// @tags add and modify
+// @tags Job add and modify
 // @description Job specified in request body will be saved to database as "running" with new DB ID.
 // @description Job specifications follow the 'JobMeta' scheme, API will fail to execute if requirements are not met.
 // @accept json

@@ -612,7 +614,7 @@ func (api *RestApi) startJob(rw http.ResponseWriter, r *http.Request) {
 
 // stopJobById godoc
 // @summary Marks job as completed and triggers archiving
-// @tags add and modify
+// @tags Job add and modify
 // @description Job to stop is specified by database ID. Only stopTime and final state are required in request body.
 // @description Returns full job resource information according to 'JobMeta' scheme.
 // @accept json

@@ -669,7 +671,7 @@ func (api *RestApi) stopJobById(rw http.ResponseWriter, r *http.Request) {
 
 // stopJobByRequest godoc
 // @summary Marks job as completed and triggers archiving
-// @tags add and modify
+// @tags Job add and modify
 // @description Job to stop is specified by request body. All fields are required in this case.
 // @description Returns full job resource information according to 'JobMeta' scheme.
 // @produce json

@@ -718,7 +720,7 @@ func (api *RestApi) stopJobByRequest(rw http.ResponseWriter, r *http.Request) {
 
 // deleteJobById godoc
 // @summary Remove a job from the sql database
-// @tags remove
+// @tags Job remove
 // @description Job to remove is specified by database ID. This will not remove the job from the job archive.
 // @produce json
 // @param id path int true "Database ID of Job"

@@ -765,7 +767,7 @@ func (api *RestApi) deleteJobById(rw http.ResponseWriter, r *http.Request) {
 
 // deleteJobByRequest godoc
 // @summary Remove a job from the sql database
-// @tags remove
+// @tags Job remove
 // @description Job to delete is specified by request body. All fields are required in this case.
 // @accept json
 // @produce json

@@ -823,7 +825,7 @@ func (api *RestApi) deleteJobByRequest(rw http.ResponseWriter, r *http.Request)
 
 // deleteJobBefore godoc
 // @summary Remove a job from the sql database
-// @tags remove
+// @tags Job remove
 // @description Remove all jobs with start time before timestamp. The jobs will not be removed from the job archive.
 // @produce json
 // @param ts path int true "Unix epoch timestamp"
@@ -955,8 +957,9 @@ func (api *RestApi) getJobMetrics(rw http.ResponseWriter, r *http.Request) {
 
 // createUser godoc
 // @summary Adds a new user
-// @tags add and modify
+// @tags User
 // @description User specified in form data will be saved to database.
+// @description Only accessible from IPs registered with apiAllowedIPs configuration option.
 // @accept mpfd
 // @produce plain
 // @param username formData string true "Unique user ID"

@@ -1022,8 +1025,9 @@ func (api *RestApi) createUser(rw http.ResponseWriter, r *http.Request) {
 
 // deleteUser godoc
 // @summary Deletes a user
-// @tags remove
+// @tags User
 // @description User defined by username in form data will be deleted from database.
+// @description Only accessible from IPs registered with apiAllowedIPs configuration option.
 // @accept mpfd
 // @produce plain
 // @param username formData string true "User ID to delete"

@@ -1058,9 +1062,10 @@ func (api *RestApi) deleteUser(rw http.ResponseWriter, r *http.Request) {
 
 // getUsers godoc
 // @summary Returns a list of users
-// @tags query
+// @tags User
 // @description Returns a JSON-encoded list of users.
 // @description Required query-parameter defines if all users or only users with additional special roles are returned.
+// @description Only accessible from IPs registered with apiAllowedIPs configuration option.
 // @produce json
 // @param not-just-user query bool true "If returned list should contain all users or only users with additional special roles"
 // @success 200 {array} api.ApiReturnedUser "List of users returned successfully"

@@ -1093,9 +1098,10 @@ func (api *RestApi) getUsers(rw http.ResponseWriter, r *http.Request) {
 
 // updateUser godoc
 // @summary Updates an existing user
-// @tags add and modify
+// @tags User
 // @description Modifies user defined by username (id) in one of four possible ways.
 // @description If more than one formValue is set then only the highest priority field is used.
+// @description Only accessible from IPs registered with apiAllowedIPs configuration option.
 // @accept mpfd
 // @produce plain
 // @param id path string true "Database ID of User"
@@ -6,6 +6,7 @@ package auth
 
 import (
 	"crypto/ed25519"
+	"database/sql"
 	"encoding/base64"
 	"errors"
 	"fmt"
@@ -152,6 +153,22 @@ func (ja *JWTCookieSessionAuthenticator) Login(
 	claims := token.Claims.(jwt.MapClaims)
 	sub, _ := claims["sub"].(string)
 
+	var roles []string
+	projects := make([]string, 0)
+
+	if jc.ValidateUser {
+		var err error
+		user, err = repository.GetUserRepository().GetUser(sub)
+		if err != nil && err != sql.ErrNoRows {
+			log.Errorf("Error while loading user '%v'", sub)
+		}
+
+		// Deny any logins for unknown usernames
+		if user == nil {
+			log.Warn("Could not find user from JWT in internal database.")
+			return nil, errors.New("unknown user")
+		}
+	} else {
 	var name string
 	if wrap, ok := claims["name"].(map[string]interface{}); ok {
 		if vals, ok := wrap["values"].([]interface{}); ok {
@@ -165,18 +182,6 @@ func (ja *JWTCookieSessionAuthenticator) Login(
 		}
 	}
 
-	var roles []string
-
-	if jc.ValidateUser {
-		// Deny any logins for unknown usernames
-		if user == nil {
-			log.Warn("Could not find user from JWT in internal database.")
-			return nil, errors.New("unknown user")
-		}
-
-		// Take user roles from database instead of trusting the JWT
-		roles = user.Roles
-	} else {
 	// Extract roles from JWT (if present)
 	if rawroles, ok := claims["roles"].([]interface{}); ok {
 		for _, rr := range rawroles {
@@ -185,20 +190,6 @@ func (ja *JWTCookieSessionAuthenticator) Login(
 			}
 		}
 	}
-	}
-
-	// (Ask browser to) Delete JWT cookie
-	deletedCookie := &http.Cookie{
-		Name:     jc.CookieName,
-		Value:    "",
-		Path:     "/",
-		MaxAge:   -1,
-		HttpOnly: true,
-	}
-	http.SetCookie(rw, deletedCookie)
-
-	if user == nil {
-		projects := make([]string, 0)
 	user = &schema.User{
 		Username: sub,
 		Name:     name,
@@ -215,5 +206,15 @@ func (ja *JWTCookieSessionAuthenticator) Login(
 		}
 	}
 
+	// (Ask browser to) Delete JWT cookie
+	deletedCookie := &http.Cookie{
+		Name:     jc.CookieName,
+		Value:    "",
+		Path:     "/",
+		MaxAge:   -1,
+		HttpOnly: true,
+	}
+	http.SetCookie(rw, deletedCookie)
+
 	return user, nil
 }
@@ -5,6 +5,7 @@
 package auth
 
 import (
+	"database/sql"
 	"encoding/base64"
 	"errors"
 	"fmt"
@@ -78,6 +79,22 @@ func (ja *JWTSessionAuthenticator) Login(
 	claims := token.Claims.(jwt.MapClaims)
 	sub, _ := claims["sub"].(string)
 
+	var roles []string
+	projects := make([]string, 0)
+
+	if config.Keys.JwtConfig.ValidateUser {
+		var err error
+		user, err = repository.GetUserRepository().GetUser(sub)
+		if err != nil && err != sql.ErrNoRows {
+			log.Errorf("Error while loading user '%v'", sub)
+		}
+
+		// Deny any logins for unknown usernames
+		if user == nil {
+			log.Warn("Could not find user from JWT in internal database.")
+			return nil, errors.New("unknown user")
+		}
+	} else {
 	var name string
 	if wrap, ok := claims["name"].(map[string]interface{}); ok {
 		if vals, ok := wrap["values"].([]interface{}); ok {
@@ -91,18 +108,6 @@ func (ja *JWTSessionAuthenticator) Login(
 		}
 	}
 
-	var roles []string
-
-	if config.Keys.JwtConfig.ValidateUser {
-		// Deny any logins for unknown usernames
-		if user == nil {
-			log.Warn("Could not find user from JWT in internal database.")
-			return nil, errors.New("unknown user")
-		}
-
-		// Take user roles from database instead of trusting the JWT
-		roles = user.Roles
-	} else {
 	// Extract roles from JWT (if present)
 	if rawroles, ok := claims["roles"].([]interface{}); ok {
 		for _, rr := range rawroles {
@@ -113,23 +118,17 @@ func (ja *JWTSessionAuthenticator) Login(
 			}
 		}
 	}
 
+	if rawprojs, ok := claims["projects"].([]interface{}); ok {
+		for _, pp := range rawprojs {
+			if p, ok := pp.(string); ok {
+				projects = append(projects, p)
+			}
+		}
+	} else if rawprojs, ok := claims["projects"]; ok {
+		projects = append(projects, rawprojs.([]string)...)
 	}
 
-	projects := make([]string, 0)
-	// Java/Grails Issued Token
-	// if rawprojs, ok := claims["projects"].([]interface{}); ok {
-	// 	for _, pp := range rawprojs {
-	// 		if p, ok := pp.(string); ok {
-	// 			projects = append(projects, p)
-	// 		}
-	// 	}
-	// } else if rawprojs, ok := claims["projects"]; ok {
-	// 	for _, p := range rawprojs.([]string) {
-	// 		projects = append(projects, p)
-	// 	}
-	// }
-
-	if user == nil {
 	user = &schema.User{
 		Username: sub,
 		Name:     name,
@@ -70,28 +70,30 @@ func (r *JobRepository) buildStatsQuery(
 	var query sq.SelectBuilder
 	castType := r.getCastType()
 
+	// fmt.Sprintf(`CAST(ROUND((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / 3600) as %s) as value`, time.Now().Unix(), castType)
+
 	if col != "" {
 		// Scan columns: id, totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
 		query = sq.Select(col, "COUNT(job.id) as totalJobs",
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s) as totalWalltime", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s) as totalWalltime`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_nodes) as %s) as totalNodes", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s) as totalNodes`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s) as totalNodeHours", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s) as totalNodeHours`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s) as totalCores", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_hwthreads) as %s) as totalCores`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s) as totalCoreHours", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_hwthreads) / 3600) as %s) as totalCoreHours`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_acc) as %s) as totalAccs", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_acc) as %s) as totalAccs`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s) as totalAccHours", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_acc) / 3600) as %s) as totalAccHours`, time.Now().Unix(), castType),
 		).From("job").GroupBy(col)
 
 	} else {
 		// Scan columns: totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
 		query = sq.Select("COUNT(job.id)",
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s)`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_nodes) as %s)", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_nodes) as %s)`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_nodes) / 3600) as %s)`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s)", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_hwthreads) as %s)`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_hwthreads) / 3600) as %s)`, time.Now().Unix(), castType),
-			fmt.Sprintf("CAST(SUM(job.num_acc) as %s)", castType),
+			fmt.Sprintf(`CAST(SUM(job.num_acc) as %s)`, castType),
-			fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
+			fmt.Sprintf(`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) * job.num_acc) / 3600) as %s)`, time.Now().Unix(), castType),
 		).From("job")
 	}
 
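The effect of the change above is that still-running jobs now contribute `now - start_time` instead of their stored duration to all aggregated hour counters. A small hedged sketch of how one of those SQL fragments is assembled, standalone and with a fixed cast type standing in for the repository's `getCastType()`:

```go
package main

import (
	"fmt"
	"time"
)

// walltimeColumn builds the totalWalltime aggregate used by the statistics
// query: finished jobs use the stored duration, running jobs use now - start_time.
func walltimeColumn(castType string) string {
	return fmt.Sprintf(
		`CAST(ROUND(SUM((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) / 3600) as %s) as totalWalltime`,
		time.Now().Unix(), castType)
}

func main() {
	// "INTEGER" is only a placeholder for the database-specific cast type.
	fmt.Println(walltimeColumn("INTEGER"))
}
```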
@@ -76,7 +76,7 @@ type ProgramConfig struct {
 	// Address where the http (or https) server will listen on (for example: 'localhost:80').
 	Addr string `json:"addr"`
 
-	// Addresses from which the /api/secured/* API endpoints can be reached
+	// Addresses from which secured API endpoints can be reached
 	ApiAllowedIPs []string `json:"apiAllowedIPs"`
 
 	// Drop root permissions once .env was read and the port was taken.
web/frontend/package-lock.json (generated, 7 changed lines)
@@ -22,6 +22,7 @@
 "@rollup/plugin-commonjs": "^24.1.0",
 "@rollup/plugin-node-resolve": "^15.0.2",
 "@rollup/plugin-terser": "^0.4.1",
+"@timohausmann/quadtree-js": "^1.2.5",
 "rollup": "^3.21.0",
 "rollup-plugin-css-only": "^4.3.0",
 "rollup-plugin-svelte": "^7.1.4",

@@ -225,6 +226,12 @@
 }
 }
 },
+"node_modules/@timohausmann/quadtree-js": {
+"version": "1.2.5",
+"resolved": "https://registry.npmjs.org/@timohausmann/quadtree-js/-/quadtree-js-1.2.5.tgz",
+"integrity": "sha512-WcH3pouYtpyLjTCRvNP0WuSV4m7mRyYhLzW44egveFryT7pJhpDsdIJASEe37iCFNA0vmEpqTYGoG0siyXEthA==",
+"dev": true
+},
 "node_modules/@types/estree": {
 "version": "1.0.1",
 "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz",
@@ -10,6 +10,7 @@
 "@rollup/plugin-commonjs": "^24.1.0",
 "@rollup/plugin-node-resolve": "^15.0.2",
 "@rollup/plugin-terser": "^0.4.1",
+"@timohausmann/quadtree-js": "^1.2.5",
 "rollup": "^3.21.0",
 "rollup-plugin-css-only": "^4.3.0",
 "rollup-plugin-svelte": "^7.1.4",
@@ -10,7 +10,7 @@
 import { binsFromFootprint } from './utils.js'
 import ScatterPlot from './plots/Scatter.svelte'
 import PlotTable from './PlotTable.svelte'
-import Roofline from './plots/Roofline.svelte'
+import RooflineHeatmap from './plots/RooflineHeatmap.svelte'
 
 const { query: initq } = init()
 

@@ -293,7 +293,7 @@
 {#each $topQuery.data.topList as te, i}
 <tr>
 <td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
-{#if groupSelection.key == 'User'}
+{#if groupSelection.key == 'user'}
 <th scope="col"><a href="/monitoring/user/{te.id}?cluster={cluster.name}">{te.id}</a></th>
 {:else}
 <th scope="col"><a href="/monitoring/jobs/?cluster={cluster.name}&project={te.id}&projectMatch=eq">{te.id}</a></th>

@@ -315,7 +315,7 @@
 {:else if $rooflineQuery.data && cluster}
 <div bind:clientWidth={colWidth2}>
 {#key $rooflineQuery.data}
-<Roofline
+<RooflineHeatmap
 width={colWidth2} height={300}
 tiles={$rooflineQuery.data.rooflineHeatmap}
 cluster={cluster.subClusters.length == 1 ? cluster.subClusters[0] : null}
@@ -4,6 +4,7 @@
 groupByScope,
 fetchMetricsStore,
 checkMetricDisabled,
+transformDataForRoofline
 } from "./utils.js";
 import {
 Row,
@@ -131,7 +132,6 @@

 let plots = {},
 jobTags,
-fullWidth,
 statsTable;
 $: document.title = $initq.fetching
 ? "Loading..."
@@ -190,7 +190,6 @@
 }));
 </script>

-<div class="row" bind:clientWidth={fullWidth} />
 <Row>
 <Col>
 {#if $initq.error}
@@ -245,7 +244,6 @@
 {/if}
 <Col>
 <Polar
-size={fullWidth / 4.1}
 metrics={ccconfig[
 `job_view_polarPlotMetrics:${$initq.data.job.cluster}`
 ] || ccconfig[`job_view_polarPlotMetrics`]}
@@ -255,19 +253,18 @@
 </Col>
 <Col>
 <Roofline
-width={fullWidth / 3 - 10}
-height={fullWidth / 5}
+renderTime={true}
 cluster={clusters
 .find((c) => c.name == $initq.data.job.cluster)
 .subClusters.find(
 (sc) => sc.name == $initq.data.job.subCluster
 )}
-flopsAny={$jobMetrics.data.jobMetrics.find(
-(m) => m.name == "flops_any" && m.scope == "node"
-)}
-memBw={$jobMetrics.data.jobMetrics.find(
-(m) => m.name == "mem_bw" && m.scope == "node"
-)}
+data={
+transformDataForRoofline (
+$jobMetrics.data.jobMetrics.find((m) => m.name == "flops_any" && m.scope == "node").metric,
+$jobMetrics.data.jobMetrics.find((m) => m.name == "mem_bw" && m.scope == "node").metric
+)
+}
 />
 </Col>
 {:else}
@@ -275,8 +272,7 @@
 <Col />
 {/if}
 </Row>
-<br />
-<Row>
+<Row class="mb-3">
 <Col xs="auto">
 {#if $initq.data}
 <TagManagement job={$initq.data.job} bind:jobTags />
@@ -293,7 +289,6 @@
 <Zoom timeseriesPlots={plots} />
 </Col> -->
 </Row>
-<br />
 <Row>
 <Col>
 {#if $jobMetrics.error}
@@ -340,8 +335,7 @@
 {/if}
 </Col>
 </Row>
-<br />
-<Row>
+<Row class="mt-2">
 <Col>
 {#if $initq.data}
 <TabContent>
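Editor's note (sketch): the job view no longer passes raw `flopsAny`/`memBw` metric objects; it hands the plot a prepared `data` array. Under the helper added to `utils.js` further down, the call amounts to:

```js
// Both lookups are assumed to yield a node-scope metric object.
const flopsAny = $jobMetrics.data.jobMetrics
  .find((m) => m.name == "flops_any" && m.scope == "node").metric
const memBw = $jobMetrics.data.jobMetrics
  .find((m) => m.name == "mem_bw" && m.scope == "node").metric

const data = transformDataForRoofline(flopsAny, memBw)
// data == [null, [intensities, flops], timeFractions], or null when no points
// could be built; a null result makes the Roofline component show its no-data card.
```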
@@ -1,7 +1,7 @@
 <script>
 import { getContext } from "svelte";
 import Refresher from "./joblist/Refresher.svelte";
-import Roofline, { transformPerNodeData } from "./plots/Roofline.svelte";
+import Roofline from "./plots/Roofline.svelte";
 import Pie, { colors } from "./plots/Pie.svelte";
 import Histogram from "./plots/Histogram.svelte";
 import {
@@ -16,7 +16,7 @@
 Progress,
 Icon,
 } from "sveltestrap";
-import { init, convert2uplot } from "./utils.js";
+import { init, convert2uplot, transformPerNodeDataForRoofline } from "./utils.js";
 import { scaleNumbers } from "./units.js";
 import {
 queryStore,
@@ -31,8 +31,8 @@
 export let cluster;

 let plotWidths = [],
-colWidth1 = 0,
-colWidth2;
+colWidth1,
+colWidth2
 let from = new Date(Date.now() - 5 * 60 * 1000),
 to = new Date(Date.now());
 const topOptions = [
@@ -427,16 +427,17 @@
 <div bind:clientWidth={plotWidths[i]}>
 {#key $mainQuery.data.nodeMetrics}
 <Roofline
+allowSizeChange={true}
 width={plotWidths[i] - 10}
 height={300}
-colorDots={true}
-showTime={false}
 cluster={subCluster}
-data={transformPerNodeData(
+data={
+transformPerNodeDataForRoofline(
 $mainQuery.data.nodeMetrics.filter(
 (data) => data.subCluster == subCluster.name
 )
-)}
+)
+}
 />
 {/key}
 </div>
@@ -444,7 +445,7 @@
 </Row>
 {/each}

-<hr style="margin-top: -1em;" />
+<hr/>

 <!-- Usage Stats as Histograms -->

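Editor's note (sketch): on the status page each sub-cluster roofline now gets one point per node, derived from the last timestep of that node's flops_any and mem_bw series (see transformPerNodeDataForRoofline in utils.js below):

```js
const nodeData = transformPerNodeDataForRoofline(
  $mainQuery.data.nodeMetrics.filter((d) => d.subCluster == subCluster.name)
)
// nodeData == [null, [intensities, flops], []] — the third array carries no time
// information, and since the status view leaves renderTime at its default (false),
// the single-color point renderer is used instead of the time gradient.
```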
@@ -22,10 +22,10 @@
 LineElement
 );

-export let size
 export let metrics
 export let cluster
 export let jobMetrics
+export let height = 365

 const metricConfig = getContext('metrics')

@@ -89,13 +89,19 @@
 // No custom defined options but keep for clarity
 const options = {
 maintainAspectRatio: false,
-animation: false
+animation: false,
+scales: { // fix scale
+r: {
+suggestedMin: 0.0,
+suggestedMax: 1.0
+}
+}
 }

 </script>

 <div class="chart-container">
-<Radar {data} {options} width={size} height={size}/>
+<Radar {data} {options} {height}/>
 </div>

 <style>
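Editor's note (assumption, not stated in the commit): the polar plot's values appear to be normalized against reference/peak values, so pinning the radial scale to [0, 1] keeps charts comparable across jobs instead of letting Chart.js auto-scale to the largest value present. The resulting options object:

```js
const options = {
  maintainAspectRatio: false,
  animation: false,
  // Pin the radial axis so a "full" polygon always means ~100% of the reference value.
  scales: { r: { suggestedMin: 0.0, suggestedMax: 1.0 } },
}
```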
@@ -1,42 +1,54 @@
-<div class="cc-plot">
+<script>
-<canvas bind:this={canvasElement} width="{prevWidth}" height="{prevHeight}"></canvas>
+import uPlot from 'uplot'
-</div>
+import { formatNumber } from '../units.js'
+import { onMount, onDestroy } from 'svelte'
+import { Card } from 'sveltestrap'

-<script context="module">
+export let data = null
-const axesColor = '#aaaaaa'
+export let renderTime = false
-const tickFontSize = 10
+export let allowSizeChange = false
-const labelFontSize = 12
+export let cluster = null
-const fontFamily = 'system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"'
+export let width = 600
-const paddingLeft = 40,
+export let height = 350
-paddingRight = 10,
-paddingTop = 10,
-paddingBottom = 50

+let plotWrapper = null
+let uplot = null
+let timeoutId = null

+const lineWidth = clusterCockpitConfig.plot_general_lineWidth

+/* Data Format
+* data = [null, [], []] // 0: null-axis required for scatter, 1: Array of XY-Array for Scatter, 2: Optional Time Info
+* data[1][0] = [100, 200, 500, ...] // X Axis -> Intensity (Vals up to clusters' flopRateScalar value)
+* data[1][1] = [1000, 2000, 1500, ...] // Y Axis -> Performance (Vals up to clusters' flopRateSimd value)
+* data[2] = [0.1, 0.15, 0.2, ...] // Color Code -> Time Information (Floats from 0 to 1) (Optional)
+*/

+// Helpers
 function getGradientR(x) {
 if (x < 0.5) return 0
 if (x > 0.75) return 255
 x = (x - 0.5) * 4.0
 return Math.floor(x * 255.0)
 }

 function getGradientG(x) {
 if (x > 0.25 && x < 0.75) return 255
 if (x < 0.25) x = x * 4.0
 else x = 1.0 - (x - 0.75) * 4.0
 return Math.floor(x * 255.0)
 }

 function getGradientB(x) {
 if (x < 0.25) return 255
 if (x > 0.5) return 0
 x = 1.0 - (x - 0.25) * 4.0
 return Math.floor(x * 255.0)
 }

 function getRGB(c) {
 return `rgb(${getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)})`
 }
+function nearestThousand (num) {
+return Math.ceil(num/1000) * 1000
+}
 function lineIntersect(x1, y1, x2, y2, x3, y3, x4, y4) {
 let l = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1)
 let a = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / l
@@ -45,314 +57,197 @@
 y: y1 + a * (y2 - y1)
 }
 }
+// End Helpers

-function axisStepFactor(i, size) {
+// Dot Renderers
-if (size && size < 500)
+const drawColorPoints = (u, seriesIdx, idx0, idx1) => {
-return 10
+const size = 5 * devicePixelRatio;
+uPlot.orient(u, seriesIdx, (series, dataX, dataY, scaleX, scaleY, valToPosX, valToPosY, xOff, yOff, xDim, yDim, moveTo, lineTo, rect, arc) => {
+let d = u.data[seriesIdx];
+let deg360 = 2 * Math.PI;
+for (let i = 0; i < d[0].length; i++) {
+let p = new Path2D();
+let xVal = d[0][i];
+let yVal = d[1][i];
+u.ctx.strokeStyle = getRGB(u.data[2][i])
+u.ctx.fillStyle = getRGB(u.data[2][i])
+if (xVal >= scaleX.min && xVal <= scaleX.max && yVal >= scaleY.min && yVal <= scaleY.max) {
+let cx = valToPosX(xVal, scaleX, xDim, xOff);
+let cy = valToPosY(yVal, scaleY, yDim, yOff);

-if (i % 3 == 0)
+p.moveTo(cx + size/2, cy);
-return 2
+arc(p, cx, cy, size/2, 0, deg360);
-else if (i % 3 == 1)
-return 2.5
-else
-return 2
 }
+u.ctx.fill(p);
-function render(ctx, data, cluster, width, height, colorDots, showTime, defaultMaxY) {
-if (width <= 0)
-return

-const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd?.value || defaultMaxY]
-const w = width - paddingLeft - paddingRight
-const h = height - paddingTop - paddingBottom

-// Helpers:
-const [log10minX, log10maxX, log10minY, log10maxY] =
-[Math.log10(minX), Math.log10(maxX), Math.log10(minY), Math.log10(maxY)]

-/* Value -> Pixel-Coordinate */
-const getCanvasX = (x) => {
-x = Math.log10(x)
-x -= log10minX; x /= (log10maxX - log10minX)
-return Math.round((x * w) + paddingLeft)
-}
-const getCanvasY = (y) => {
-y = Math.log10(y)
-y -= log10minY
-y /= (log10maxY - log10minY)
-return Math.round((h - y * h) + paddingTop)
 }
+});
+return null;
+};

-// Axes
+const drawPoints = (u, seriesIdx, idx0, idx1) => {
-ctx.fillStyle = 'black'
+const size = 5 * devicePixelRatio;
-ctx.strokeStyle = axesColor
+uPlot.orient(u, seriesIdx, (series, dataX, dataY, scaleX, scaleY, valToPosX, valToPosY, xOff, yOff, xDim, yDim, moveTo, lineTo, rect, arc) => {
-ctx.font = `${tickFontSize}px ${fontFamily}`
+let d = u.data[seriesIdx];
-ctx.beginPath()
+u.ctx.strokeStyle = getRGB(0);
-for (let x = minX, i = 0; x <= maxX; i++) {
+u.ctx.fillStyle = getRGB(0);
-let px = getCanvasX(x)
+let deg360 = 2 * Math.PI;
-let text = formatNumber(x)
+let p = new Path2D();
-let textWidth = ctx.measureText(text).width
+for (let i = 0; i < d[0].length; i++) {
-ctx.fillText(text,
+let xVal = d[0][i];
-Math.floor(px - (textWidth / 2)),
+let yVal = d[1][i];
-height - paddingBottom + tickFontSize + 5)
+if (xVal >= scaleX.min && xVal <= scaleX.max && yVal >= scaleY.min && yVal <= scaleY.max) {
-ctx.moveTo(px, paddingTop - 5)
+let cx = valToPosX(xVal, scaleX, xDim, xOff);
-ctx.lineTo(px, height - paddingBottom + 5)
+let cy = valToPosY(yVal, scaleY, yDim, yOff);
+p.moveTo(cx + size/2, cy);
-x *= axisStepFactor(i, w)
+arc(p, cx, cy, size/2, 0, deg360);
-}
-if (data.xLabel) {
-ctx.font = `${labelFontSize}px ${fontFamily}`
-let textWidth = ctx.measureText(data.xLabel).width
-ctx.fillText(data.xLabel, Math.floor((width / 2) - (textWidth / 2)), height - 20)
-}

-ctx.textAlign = 'center'
-ctx.font = `${tickFontSize}px ${fontFamily}`
-for (let y = minY, i = 0; y <= maxY; i++) {
-let py = getCanvasY(y)
-ctx.moveTo(paddingLeft - 5, py)
-ctx.lineTo(width - paddingRight + 5, py)

-ctx.save()
-ctx.translate(paddingLeft - 10, py)
-ctx.rotate(-Math.PI / 2)
-ctx.fillText(formatNumber(y), 0, 0)
-ctx.restore()

-y *= axisStepFactor(i)
-}
-if (data.yLabel) {
-ctx.font = `${labelFontSize}px ${fontFamily}`
-ctx.save()
-ctx.translate(15, Math.floor(height / 2))
-ctx.rotate(-Math.PI / 2)
-ctx.fillText(data.yLabel, 0, 0)
-ctx.restore()
-}
-ctx.stroke()

-// Draw Data
-if (data.x && data.y) {
-for (let i = 0; i < data.x.length; i++) {
-let x = data.x[i], y = data.y[i], c = data.c[i]
-if (x == null || y == null || Number.isNaN(x) || Number.isNaN(y))
-continue

-const s = 3
-const px = getCanvasX(x)
-const py = getCanvasY(y)

-ctx.fillStyle = getRGB(c)
-ctx.beginPath()
-ctx.arc(px, py, s, 0, Math.PI * 2, false)
-ctx.fill()
-}
-} else if (data.tiles) {
-const rows = data.tiles.length
-const cols = data.tiles[0].length

-const tileWidth = Math.ceil(w / cols)
-const tileHeight = Math.ceil(h / rows)

-let max = data.tiles.reduce((max, row) =>
-Math.max(max, row.reduce((max, val) =>
-Math.max(max, val)), 0), 0)

-if (max == 0)
-max = 1

-const tileColor = val => `rgba(255, 0, 0, ${(val / max)})`

-for (let i = 0; i < rows; i++) {
-for (let j = 0; j < cols; j++) {
-let px = paddingLeft + (j / cols) * w
-let py = paddingTop + (h - (i / rows) * h) - tileHeight

-ctx.fillStyle = tileColor(data.tiles[i][j])
-ctx.fillRect(px, py, tileWidth, tileHeight)
-}
 }
 }
+u.ctx.fill(p);
+});
+return null;
+};

-// Draw roofs
+// Main Function
-ctx.strokeStyle = 'black'
+function render(plotData) {
-ctx.lineWidth = 2
+if (plotData) {
-ctx.beginPath()
+const opts = {
+title: "",
+mode: 2,
+width: width,
+height: height,
+legend: {
+show: false
+},
+cursor: { drag: { x: false, y: false } },
+axes: [
+{
+label: 'Intensity [FLOPS/Byte]',
+values: (u, vals) => vals.map(v => formatNumber(v))
+},
+{
+label: 'Performace [GFLOPS]',
+values: (u, vals) => vals.map(v => formatNumber(v))
+}
+],
+scales: {
+x: {
+time: false,
+range: [0.01, 1000],
+distr: 3, // Render as log
+log: 10, // log exp
+},
+y: {
+range: [1.0, cluster?.flopRateSimd?.value ? nearestThousand(cluster.flopRateSimd.value) : 10000],
+distr: 3, // Render as log
+log: 10, // log exp
+},
+},
+series: [
+{},
+{ paths: renderTime ? drawColorPoints : drawPoints }
+],
+hooks: {
+drawClear: [
+u => {
+u.series.forEach((s, i) => {
+if (i > 0)
+s._paths = null;
+});
+},
+],
+draw: [
+u => { // draw roofs when cluster set
+// console.log(u)
 if (cluster != null) {
+const padding = u._padding // [top, right, bottom, left]

+u.ctx.strokeStyle = 'black'
+u.ctx.lineWidth = lineWidth
+u.ctx.beginPath()

 const ycut = 0.01 * cluster.memoryBandwidth.value
 const scalarKnee = (cluster.flopRateScalar.value - ycut) / cluster.memoryBandwidth.value
 const simdKnee = (cluster.flopRateSimd.value - ycut) / cluster.memoryBandwidth.value
-const scalarKneeX = getCanvasX(scalarKnee),
+const scalarKneeX = u.valToPos(scalarKnee, 'x', true), // Value, axis, toCanvasPixels
-simdKneeX = getCanvasX(simdKnee),
+simdKneeX = u.valToPos(simdKnee, 'x', true),
-flopRateScalarY = getCanvasY(cluster.flopRateScalar.value),
+flopRateScalarY = u.valToPos(cluster.flopRateScalar.value, 'y', true),
-flopRateSimdY = getCanvasY(cluster.flopRateSimd.value)
+flopRateSimdY = u.valToPos(cluster.flopRateSimd.value, 'y', true)

-if (scalarKneeX < width - paddingRight) {
+// Debug get zoomLevel from browser
-ctx.moveTo(scalarKneeX, flopRateScalarY)
+// console.log("Zoom", Math.round(window.devicePixelRatio * 100))
-ctx.lineTo(width - paddingRight, flopRateScalarY)
+if (scalarKneeX < (width * window.devicePixelRatio) - (padding[1] * window.devicePixelRatio)) { // Top horizontal roofline
+u.ctx.moveTo(scalarKneeX, flopRateScalarY)
+u.ctx.lineTo((width * window.devicePixelRatio) - (padding[1] * window.devicePixelRatio), flopRateScalarY)
 }

-if (simdKneeX < width - paddingRight) {
+if (simdKneeX < (width * window.devicePixelRatio) - (padding[1] * window.devicePixelRatio)) { // Lower horitontal roofline
-ctx.moveTo(simdKneeX, flopRateSimdY)
+u.ctx.moveTo(simdKneeX, flopRateSimdY)
-ctx.lineTo(width - paddingRight, flopRateSimdY)
+u.ctx.lineTo((width * window.devicePixelRatio) - (padding[1] * window.devicePixelRatio), flopRateSimdY)
 }

-let x1 = getCanvasX(0.01),
+let x1 = u.valToPos(0.01, 'x', true),
-y1 = getCanvasY(ycut),
+y1 = u.valToPos(ycut, 'y', true)
-x2 = getCanvasX(simdKnee),
+let x2 = u.valToPos(simdKnee, 'x', true),
 y2 = flopRateSimdY

 let xAxisIntersect = lineIntersect(
 x1, y1, x2, y2,
-0, height - paddingBottom, width, height - paddingBottom)
+u.valToPos(0.01, 'x', true), u.valToPos(1.0, 'y', true), // X-Axis Start Coords
+u.valToPos(1000, 'x', true), u.valToPos(1.0, 'y', true) // X-Axis End Coords
+)

 if (xAxisIntersect.x > x1) {
 x1 = xAxisIntersect.x
 y1 = xAxisIntersect.y
 }

-ctx.moveTo(x1, y1)
+// Diagonal
-ctx.lineTo(x2, y2)
+u.ctx.moveTo(x1, y1)
-}
+u.ctx.lineTo(x2, y2)
-ctx.stroke()

-if (colorDots && showTime && data.x && data.y) {
+u.ctx.stroke()
-// The Color Scale For Time Information
+// Reset grid lineWidth
-ctx.fillStyle = 'black'
+u.ctx.lineWidth = 0.15
-ctx.fillText('Time:', 17, height - 5)
-const start = paddingLeft + 5
-for (let x = start; x < width - paddingRight; x += 15) {
-let c = (x - start) / (width - start - paddingRight)
-ctx.fillStyle = getRGB(c)
-ctx.beginPath()
-ctx.arc(x, height - 10, 5, 0, Math.PI * 2, false)
-ctx.fill()
-}
-}
-}

-function transformData(flopsAny, memBw, colorDots) { // Uses Metric Object
-const nodes = flopsAny.series.length
-const timesteps = flopsAny.series[0].data.length

-/* c will contain values from 0 to 1 representing the time */
-const x = [], y = [], c = []

-if (flopsAny && memBw) {
-for (let i = 0; i < nodes; i++) {
-const flopsData = flopsAny.series[i].data
-const memBwData = memBw.series[i].data
-for (let j = 0; j < timesteps; j++) {
-const f = flopsData[j], m = memBwData[j]
-const intensity = f / m
-if (Number.isNaN(intensity) || !Number.isFinite(intensity))
-continue

-x.push(intensity)
-y.push(f)
-c.push(colorDots ? j / timesteps : 0)
 }
 }
+]
+},
+};
+uplot = new uPlot(opts, plotData, plotWrapper);
 } else {
-console.warn("transformData: metrics for 'mem_bw' and/or 'flops_any' missing!")
+console.log('No data for roofline!')
-}

-return {
-x, y, c,
-xLabel: 'Intensity [FLOPS/byte]',
-yLabel: 'Performance [GFLOPS]'
 }
 }

-// Return something to be plotted. The argument shall be the result of the
+// Svelte and Sizechange
-// `nodeMetrics` GraphQL query.
-export function transformPerNodeData(nodes) {
-const x = [], y = [], c = []
-for (let node of nodes) {
-let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.scope == 'node')?.metric
-let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.scope == 'node')?.metric
-if (!flopsAny || !memBw) {
-console.warn("transformPerNodeData: metrics for 'mem_bw' and/or 'flops_any' missing!")
-continue
-}

-let flopsData = flopsAny.series[0].data, memBwData = memBw.series[0].data
-const f = flopsData[flopsData.length - 1], m = memBwData[flopsData.length - 1]
-const intensity = f / m
-if (Number.isNaN(intensity) || !Number.isFinite(intensity))
-continue

-x.push(intensity)
-y.push(f)
-c.push(0)
-}

-return {
-x, y, c,
-xLabel: 'Intensity [FLOPS/byte]',
-yLabel: 'Performance [GFLOPS]'
-}
-}
-</script>

-<script>
-import { onMount, tick } from 'svelte'
-import { formatNumber } from '../units.js'

-export let flopsAny = null
-export let memBw = null
-export let cluster = null
-export let maxY = null
-export let width = 500
-export let height = 300
-export let tiles = null
-export let colorDots = true
-export let showTime = true
-export let data = null

-console.assert(data || tiles || (flopsAny && memBw), "you must provide flopsAny and memBw or tiles!")

-let ctx, canvasElement, prevWidth = width, prevHeight = height
-data = data != null ? data : (flopsAny && memBw
-? transformData(flopsAny.metric, memBw.metric, colorDots) // Use Metric Object from Parent
-: {
-tiles: tiles,
-xLabel: 'Intensity [FLOPS/byte]',
-yLabel: 'Performance [GFLOPS]'
-})

 onMount(() => {
-ctx = canvasElement.getContext('2d')
+render(data)
-if (prevWidth != width || prevHeight != height) {
-sizeChanged()
-return
-}

-canvasElement.width = width
-canvasElement.height = height
-render(ctx, data, cluster, width, height, colorDots, showTime, maxY)
 })
+onDestroy(() => {
-let timeoutId = null
+if (uplot)
-function sizeChanged() {
+uplot.destroy()
-if (!ctx)
-return

 if (timeoutId != null)
 clearTimeout(timeoutId)
+})
+function sizeChanged() {
+if (timeoutId != null)
+clearTimeout(timeoutId)

-prevWidth = width
-prevHeight = height
 timeoutId = setTimeout(() => {
-if (!canvasElement)
-return

 timeoutId = null
-canvasElement.width = width
+if (uplot)
-canvasElement.height = height
+uplot.destroy()
-render(ctx, data, cluster, width, height, colorDots, showTime, maxY)
+render(data)
-}, 250)
+}, 200)
 }
+$: if (allowSizeChange) sizeChanged(width, height)
-$: sizeChanged(width, height)
 </script>

+{#if data != null}
+<div bind:this={plotWrapper}/>
+{:else}
+<Card class="mx-4" body color="warning">Cannot render roofline: No data!</Card>
+{/if}
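Editor's sketch (standalone example, not part of the commit): the rewritten component relies on uPlot's `mode: 2` scatter layout; the data array documented in the component's "Data Format" comment looks like this in isolation:

```js
import uPlot from 'uplot'

const intensities = [0.5, 2.0, 8.0]   // FLOPS/Byte  -> x values
const performance = [120, 900, 2400]  // GFLOPS      -> y values
const timeFrac    = [0.1, 0.5, 0.9]   // 0..1, drives the point color gradient

// Index 0 must be null for scatter mode; index 1 holds [x[], y[]]; index 2 is optional.
const data = [null, [intensities, performance], timeFrac]

// A point renderer such as drawColorPoints above is then attached via
// series: [{}, { paths: drawColorPoints }] when constructing new uPlot(opts, data, el).
```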
web/frontend/src/plots/RooflineHeatmap.svelte (new file, 234 lines)
@@ -0,0 +1,234 @@
+<div class="cc-plot">
+<canvas bind:this={canvasElement} width="{prevWidth}" height="{prevHeight}"></canvas>
+</div>
+
+<script context="module">
+const axesColor = '#aaaaaa'
+const tickFontSize = 10
+const labelFontSize = 12
+const fontFamily = 'system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"'
+const paddingLeft = 40,
+paddingRight = 10,
+paddingTop = 10,
+paddingBottom = 50
+
+function lineIntersect(x1, y1, x2, y2, x3, y3, x4, y4) {
+let l = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1)
+let a = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / l
+return {
+x: x1 + a * (x2 - x1),
+y: y1 + a * (y2 - y1)
+}
+}
+
+function axisStepFactor(i, size) {
+if (size && size < 500)
+return 10
+
+if (i % 3 == 0)
+return 2
+else if (i % 3 == 1)
+return 2.5
+else
+return 2
+}
+
+function render(ctx, data, cluster, width, height, defaultMaxY) {
+if (width <= 0)
+return
+
+const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd?.value || defaultMaxY]
+const w = width - paddingLeft - paddingRight
+const h = height - paddingTop - paddingBottom
+
+// Helpers:
+const [log10minX, log10maxX, log10minY, log10maxY] =
+[Math.log10(minX), Math.log10(maxX), Math.log10(minY), Math.log10(maxY)]
+
+/* Value -> Pixel-Coordinate */
+const getCanvasX = (x) => {
+x = Math.log10(x)
+x -= log10minX; x /= (log10maxX - log10minX)
+return Math.round((x * w) + paddingLeft)
+}
+const getCanvasY = (y) => {
+y = Math.log10(y)
+y -= log10minY
+y /= (log10maxY - log10minY)
+return Math.round((h - y * h) + paddingTop)
+}
+
+// Axes
+ctx.fillStyle = 'black'
+ctx.strokeStyle = axesColor
+ctx.font = `${tickFontSize}px ${fontFamily}`
+ctx.beginPath()
+for (let x = minX, i = 0; x <= maxX; i++) {
+let px = getCanvasX(x)
+let text = formatNumber(x)
+let textWidth = ctx.measureText(text).width
+ctx.fillText(text,
+Math.floor(px - (textWidth / 2)),
+height - paddingBottom + tickFontSize + 5)
+ctx.moveTo(px, paddingTop - 5)
+ctx.lineTo(px, height - paddingBottom + 5)
+
+x *= axisStepFactor(i, w)
+}
+if (data.xLabel) {
+ctx.font = `${labelFontSize}px ${fontFamily}`
+let textWidth = ctx.measureText(data.xLabel).width
+ctx.fillText(data.xLabel, Math.floor((width / 2) - (textWidth / 2)), height - 20)
+}
+
+ctx.textAlign = 'center'
+ctx.font = `${tickFontSize}px ${fontFamily}`
+for (let y = minY, i = 0; y <= maxY; i++) {
+let py = getCanvasY(y)
+ctx.moveTo(paddingLeft - 5, py)
+ctx.lineTo(width - paddingRight + 5, py)
+
+ctx.save()
+ctx.translate(paddingLeft - 10, py)
+ctx.rotate(-Math.PI / 2)
+ctx.fillText(formatNumber(y), 0, 0)
+ctx.restore()
+
+y *= axisStepFactor(i)
+}
+if (data.yLabel) {
+ctx.font = `${labelFontSize}px ${fontFamily}`
+ctx.save()
+ctx.translate(15, Math.floor(height / 2))
+ctx.rotate(-Math.PI / 2)
+ctx.fillText(data.yLabel, 0, 0)
+ctx.restore()
+}
+ctx.stroke()
+
+// Draw Data
+if (data.tiles) {
+const rows = data.tiles.length
+const cols = data.tiles[0].length
+
+const tileWidth = Math.ceil(w / cols)
+const tileHeight = Math.ceil(h / rows)
+
+let max = data.tiles.reduce((max, row) =>
+Math.max(max, row.reduce((max, val) =>
+Math.max(max, val)), 0), 0)
+
+if (max == 0)
+max = 1
+
+const tileColor = val => `rgba(255, 0, 0, ${(val / max)})`
+
+for (let i = 0; i < rows; i++) {
+for (let j = 0; j < cols; j++) {
+let px = paddingLeft + (j / cols) * w
+let py = paddingTop + (h - (i / rows) * h) - tileHeight
+
+ctx.fillStyle = tileColor(data.tiles[i][j])
+ctx.fillRect(px, py, tileWidth, tileHeight)
+}
+}
+}
+
+// Draw roofs
+ctx.strokeStyle = 'black'
+ctx.lineWidth = 2
+ctx.beginPath()
+if (cluster != null) {
+const ycut = 0.01 * cluster.memoryBandwidth.value
+const scalarKnee = (cluster.flopRateScalar.value - ycut) / cluster.memoryBandwidth.value
+const simdKnee = (cluster.flopRateSimd.value - ycut) / cluster.memoryBandwidth.value
+const scalarKneeX = getCanvasX(scalarKnee),
+simdKneeX = getCanvasX(simdKnee),
+flopRateScalarY = getCanvasY(cluster.flopRateScalar.value),
+flopRateSimdY = getCanvasY(cluster.flopRateSimd.value)
+
+if (scalarKneeX < width - paddingRight) {
+ctx.moveTo(scalarKneeX, flopRateScalarY)
+ctx.lineTo(width - paddingRight, flopRateScalarY)
+}
+
+if (simdKneeX < width - paddingRight) {
+ctx.moveTo(simdKneeX, flopRateSimdY)
+ctx.lineTo(width - paddingRight, flopRateSimdY)
+}
+
+let x1 = getCanvasX(0.01),
+y1 = getCanvasY(ycut),
+x2 = getCanvasX(simdKnee),
+y2 = flopRateSimdY
+
+let xAxisIntersect = lineIntersect(
+x1, y1, x2, y2,
+0, height - paddingBottom, width, height - paddingBottom)
+
+if (xAxisIntersect.x > x1) {
+x1 = xAxisIntersect.x
+y1 = xAxisIntersect.y
+}
+
+ctx.moveTo(x1, y1)
+ctx.lineTo(x2, y2)
+}
+ctx.stroke()
+}
+</script>
+
+<script>
+import { onMount } from 'svelte'
+import { formatNumber } from '../units.js'
+
+export let cluster = null
+export let tiles = null
+export let maxY = null
+export let width = 500
+export let height = 300
+
+console.assert(tiles, "you must provide tiles!")
+
+let ctx, canvasElement, prevWidth = width, prevHeight = height
+const data = {
+tiles: tiles,
+xLabel: 'Intensity [FLOPS/byte]',
+yLabel: 'Performance [GFLOPS]'
+}
+
+onMount(() => {
+ctx = canvasElement.getContext('2d')
+if (prevWidth != width || prevHeight != height) {
+sizeChanged()
+return
+}
+
+canvasElement.width = width
+canvasElement.height = height
+render(ctx, data, cluster, width, height, maxY)
+})
+
+let timeoutId = null
+function sizeChanged() {
+if (!ctx)
+return
+
+if (timeoutId != null)
+clearTimeout(timeoutId)
+
+prevWidth = width
+prevHeight = height
+timeoutId = setTimeout(() => {
+if (!canvasElement)
+return
+
+timeoutId = null
+canvasElement.width = width
+canvasElement.height = height
+render(ctx, data, cluster, width, height, maxY)
+}, 250)
+}
+
+$: sizeChanged(width, height)
+</script>
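Editor's note: both roofline plots draw the same roof geometry from the sub-cluster configuration. A worked example with illustrative numbers (not a real cluster config):

```js
const memoryBandwidth = { value: 200 }  // GB/s
const flopRateScalar  = { value: 1000 } // GFLOPS, scalar peak
const flopRateSimd    = { value: 4000 } // GFLOPS, SIMD peak

const ycut       = 0.01 * memoryBandwidth.value                          // 2
const scalarKnee = (flopRateScalar.value - ycut) / memoryBandwidth.value // 4.99 FLOPS/Byte
const simdKnee   = (flopRateSimd.value - ycut) / memoryBandwidth.value   // 19.99 FLOPS/Byte
// Left of a knee the sloped, memory-bound roof applies; right of it the flat
// peak-FLOPS roof is drawn from the knee to the plot's right edge.
```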
@@ -6,8 +6,8 @@ const power = [1, 1e3, 1e6, 1e9, 1e12, 1e15, 1e18, 1e21]
 const prefix = ['', 'K', 'M', 'G', 'T', 'P', 'E']

 export function formatNumber(x) {
-if ( isNaN(x) ) {
-return x // Return if String , used in Histograms
+if ( isNaN(x) || x == null) {
+return x // Return if String or Null
 } else {
 for (let i = 0; i < prefix.length; i++)
 if (power[i] <= x && x < power[i+1])
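Editor's note (sketch): in JavaScript `isNaN(null)` is false because `null` coerces to 0, so the added `x == null` check is what lets null values pass through unchanged:

```js
formatNumber(null)    // now returned as-is; previously null fell through to the numeric path
formatNumber("peak")  // isNaN("peak") is true, so non-numeric strings are still returned unchanged
formatNumber(2500)    // falls through to the K/M/G prefix scaling below
```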
@@ -6,7 +6,7 @@ import {
 } from "@urql/svelte";
 import { setContext, getContext, hasContext, onDestroy, tick } from "svelte";
 import { readable } from "svelte/store";
-import { formatNumber } from './units.js'
+// import { formatNumber } from './units.js'

 /*
 * Call this function only at component initialization time!
@@ -326,8 +326,11 @@ export function convert2uplot(canvasData) {
 }

 export function binsFromFootprint(weights, scope, values, numBins) {
-let min = 0, max = 0
+let min = 0, max = 0 //, median = 0
 if (values.length != 0) {
+// Extreme, wrong peak vlaues: Filter here or backend?
+// median = median(values)
+
 for (let x of values) {
 min = Math.min(min, x)
 max = Math.max(max, x)
@@ -363,3 +366,75 @@ export function binsFromFootprint(weights, scope, values, numBins) {
 bins: bins
 }
 }
+
+export function transformDataForRoofline(flopsAny, memBw) { // Uses Metric Objects: {series:[{},{},...], timestep:60, name:$NAME}
+const nodes = flopsAny.series.length
+const timesteps = flopsAny.series[0].data.length
+
+/* c will contain values from 0 to 1 representing the time */
+let data = null
+const x = [], y = [], c = []
+
+if (flopsAny && memBw) {
+for (let i = 0; i < nodes; i++) {
+const flopsData = flopsAny.series[i].data
+const memBwData = memBw.series[i].data
+for (let j = 0; j < timesteps; j++) {
+const f = flopsData[j], m = memBwData[j]
+const intensity = f / m
+if (Number.isNaN(intensity) || !Number.isFinite(intensity))
+continue
+
+x.push(intensity)
+y.push(f)
+c.push(j / timesteps)
+}
+}
+} else {
+console.warn("transformData: metrics for 'mem_bw' and/or 'flops_any' missing!")
+}
+if (x.length > 0 && y.length > 0 && c.length > 0) {
+data = [null, [x, y], c] // for dataformat see roofline.svelte
+}
+return data
+}
+
+// Return something to be plotted. The argument shall be the result of the
+// `nodeMetrics` GraphQL query.
+export function transformPerNodeDataForRoofline(nodes) {
+let data = null
+const x = [], y = []
+for (let node of nodes) {
+let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.scope == 'node')?.metric
+let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.scope == 'node')?.metric
+if (!flopsAny || !memBw) {
+console.warn("transformPerNodeData: metrics for 'mem_bw' and/or 'flops_any' missing!")
+continue
+}
+
+let flopsData = flopsAny.series[0].data, memBwData = memBw.series[0].data
+const f = flopsData[flopsData.length - 1], m = memBwData[flopsData.length - 1]
+const intensity = f / m
+if (Number.isNaN(intensity) || !Number.isFinite(intensity))
+continue
+
+x.push(intensity)
+y.push(f)
+}
+if (x.length > 0 && y.length > 0) {
+data = [null, [x, y], []] // for dataformat see roofline.svelte
+}
+return data
+}
+
+// https://stackoverflow.com/questions/45309447/calculating-median-javascript
+// function median(numbers) {
+// const sorted = Array.from(numbers).sort((a, b) => a - b);
+// const middle = Math.floor(sorted.length / 2);
+
+// if (sorted.length % 2 === 0) {
+// return (sorted[middle - 1] + sorted[middle]) / 2;
+// }
+
+// return sorted[middle];
+// }
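Editor's usage sketch for the two new helpers (assumed call sites, mirroring the Job and Status views above):

```js
// Per-timestep points for a single job (time fraction in the third slot):
const jobData = transformDataForRoofline(flopsAnyMetric, memBwMetric)
// -> [null, [intensities, flops], timeFractions]  or null

// One point per node for the status view (no time information):
const nodeData = transformPerNodeDataForRoofline(nodeMetricsFromGraphQL)
// -> [null, [intensities, flops], []]  or null
// A null result makes Roofline.svelte render "Cannot render roofline: No data!".
```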