Mirror of https://github.com/ClusterCockpit/cc-backend (synced 2025-04-17 17:25:55 +02:00)
Commit 03175681b6
Merge branch 'master' into 135-batch-scheduler-integration
Makefile (2 changed lines)
@@ -2,7 +2,7 @@ TARGET = ./cc-backend
VAR = ./var
CFG = config.json .env
FRONTEND = ./web/frontend
VERSION = 1.1.0
VERSION = 1.2.0
GIT_HASH := $(shell git rev-parse --short HEAD || echo 'development')
CURRENT_TIME = $(shell date +"%Y-%m-%d:T%H:%M:%S")
LD_FLAGS = '-s -X main.date=${CURRENT_TIME} -X main.version=${VERSION} -X main.commit=${GIT_HASH}'
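The `LD_FLAGS` above inject build metadata at link time by overwriting package-level variables in `main`. A minimal sketch of the Go side, assuming only the variable names that appear in the flags (`main.version`, `main.commit`, `main.date`) and a plain `go build` invocation:

```go
package main

import "fmt"

// Overwritten at link time, e.g.:
//   go build -ldflags '-s -X main.date=2023-08-24:T10:00:00 -X main.version=1.2.0 -X main.commit=abc1234'
var (
	version = "dev"
	commit  = "development"
	date    = "unknown"
)

func printBuildInfo() {
	fmt.Printf("cc-backend %s (commit %s, built %s)\n", version, commit, date)
}

func main() {
	printBuildInfo()
}
```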
@@ -165,7 +165,7 @@ If you start `cc-backend` with the `-dev` flag, the GraphQL Playground UI is ava
This project integrates [swagger ui] (https://swagger.io/tools/swagger-ui/) to document and test its REST API.
The swagger documentation files can be found in `./api/`.
You can generate the swagger-ui configuration by running `go run github.com/swaggo/swag/cmd/swag init -d ./internal/api,./pkg/schema -g rest.go -o ./api `.
You need to move the created `./api/doc.go` to `./internal/api/doc.go`.
You need to move the created `./api/docs.go` to `./internal/api/docs.go`.
If you start cc-backend with the `-dev` flag, the Swagger interface is available
at http://localhost:8080/swagger/.
You must enter a JWT key for a user with the API role.
@@ -1,13 +1,18 @@
# `cc-backend` version 1.1.0
# `cc-backend` version 1.2.0

Supports job archive version 1 and database version 6.

This is a minor release of `cc-backend`, the API backend and frontend
implementation of ClusterCockpit.

** Breaking changes v1 **
** Breaking changes **

The aggregate job statistic core hours is now computed using the job table
* The LDAP configuration option user_filter was changed and now should not include
  the uid wildcard. Example:
  - Old: `"user_filter": "(&(objectclass=posixAccount)(uid=*))"`
  - New: `"user_filter": "(&(objectclass=posixAccount))"`

* The aggregate job statistic core hours is now computed using the job table
  column `num_hwthreads`. In a future release this column will be renamed to
  `num_cores`. For correct display of core hours `num_hwthreads` must be correctly
  filled on job start. If your existing jobs do not provide the correct value in
@@ -16,6 +21,10 @@ if you have exclusive jobs, only. Please be aware that we treat this column as
it is the number of cores. In case you have SMT enabled and `num_hwthreads`
is not the number of cores the core hours will be too high by a factor!

* The jwts key is now mandatory in config.json. It has to set max-age for
  validity. Some key names have changed, please refer to
  [config documentation](./configs/README.md) for details.

** NOTE **
If you are using the sqlite3 backend the `PRAGMA` option `foreign_keys` must be
explicitly set to ON. If using the sqlite3 console it is per default set to
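To make the breaking change concrete, a sketch of the arithmetic implied by the note above, assuming core hours are simply `num_hwthreads` multiplied by the job duration in hours (illustrative only, not the exact repository implementation):

```go
package main

import (
	"fmt"
	"time"
)

// coreHours treats num_hwthreads as the number of cores, as described above.
func coreHours(numHWThreads int, duration time.Duration) float64 {
	return float64(numHWThreads) * duration.Hours()
}

func main() {
	// A 2-hour job on 128 hardware threads counts as 256 core hours.
	// With 2-way SMT the job really used 64 cores (128 core hours),
	// so the statistic would be too high by a factor of two.
	fmt.Println(coreHours(128, 2*time.Hour))
}
```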
@@ -156,12 +156,18 @@ type MetricFootprints {
}

type Footprints {
  nodehours: [NullableFloat!]!
  timeWeights: TimeWeights!
  metrics: [MetricFootprints!]!
}

type TimeWeights {
  nodeHours: [NullableFloat!]!
  accHours: [NullableFloat!]!
  coreHours: [NullableFloat!]!
}

enum Aggregate { USER, PROJECT, CLUSTER }
enum Weights { NODE_COUNT, NODE_HOURS }
enum SortByAggregate { TOTALWALLTIME, TOTALJOBS, TOTALNODES, TOTALNODEHOURS, TOTALCORES, TOTALCOREHOURS, TOTALACCS, TOTALACCHOURS }

type NodeMetrics {
  host: String!
@@ -192,8 +198,7 @@ type Query {
  jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints

  jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
  jobsStatistics(filter: [JobFilter!], groupBy: Aggregate): [JobsStatistics!]!
  jobsCount(filter: [JobFilter]!, groupBy: Aggregate!, weight: Weights, limit: Int): [Count!]!
  jobsStatistics(filter: [JobFilter!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!

  rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!

@@ -288,11 +293,16 @@ type JobsStatistics {
  runningJobs: Int! # Number of running jobs
  shortJobs: Int! # Number of jobs with a duration of less than duration
  totalWalltime: Int! # Sum of the duration of all matched jobs in hours
  totalNodes: Int! # Sum of the nodes of all matched jobs
  totalNodeHours: Int! # Sum of the node hours of all matched jobs
  totalCores: Int! # Sum of the cores of all matched jobs
  totalCoreHours: Int! # Sum of the core hours of all matched jobs
  totalAccs: Int! # Sum of the accs of all matched jobs
  totalAccHours: Int! # Sum of the gpu hours of all matched jobs
  histDuration: [HistoPoint!]! # value: hour, count: number of jobs with a rounded duration of value
  histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
  histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
  histNumAccs: [HistoPoint!]! # value: number of accs, count: number of jobs with that number of accs
}

input PageRequest {
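For orientation, the reworked `jobsStatistics` field above can be exercised against the GraphQL endpoint. A minimal sketch, assuming the endpoint is mounted at `/query` (the route mentioned in the `cmd/cc-backend/main.go` changes further below) and that the request is authorized by a session cookie or JWT:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Request only fields that appear in the JobsStatistics type above.
	query := `{ jobsStatistics(groupBy: USER, sortBy: TOTALCOREHOURS) { runningJobs totalCoreHours totalAccHours } }`
	body, _ := json.Marshal(map[string]string{"query": query})

	req, _ := http.NewRequest(http.MethodPost, "http://localhost:8080/query", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <JWT>") // or reuse a browser session cookie

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```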
api/swagger.json (362 changed lines)
@ -12,7 +12,7 @@
|
||||
"name": "MIT License",
|
||||
"url": "https://opensource.org/licenses/MIT"
|
||||
},
|
||||
"version": "1"
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"host": "localhost:8080",
|
||||
"basePath": "/api",
|
||||
@ -707,9 +707,367 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/user/{id}": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"add and modify"
|
||||
],
|
||||
"summary": "Updates an existing user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Database ID of User",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "Priority 1: Role to add",
|
||||
"name": "add-role",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "Priority 2: Role to remove",
|
||||
"name": "remove-role",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Priority 3: Project to add",
|
||||
"name": "add-project",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Priority 4: Project to remove",
|
||||
"name": "remove-project",
|
||||
"in": "formData"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Success Response Message",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: The user could not be updated",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/users/": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"query"
|
||||
],
|
||||
"summary": "Returns a list of users",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"description": "If returned list should contain all users or only users with additional special roles",
|
||||
"name": "not-just-user",
|
||||
"in": "query",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of users returned successfully",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/api.ApiReturnedUser"
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "User specified in form data will be saved to database.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"add and modify"
|
||||
],
|
||||
"summary": "Adds a new user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Unique user ID",
|
||||
"name": "username",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User password",
|
||||
"name": "password",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "User role",
|
||||
"name": "role",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Managed project, required for new manager role user",
|
||||
"name": "project",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Users name",
|
||||
"name": "name",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Users email",
|
||||
"name": "email",
|
||||
"in": "formData"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Success Response",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: creating user failed",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "User defined by username in form data will be deleted from database.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"remove"
|
||||
],
|
||||
"summary": "Deletes a user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID to delete",
|
||||
"name": "username",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "User deleted successfully"
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: deleting user failed",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"api.ApiReturnedUser": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"projects": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"roles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"api.ApiTag": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@ -1366,7 +1724,7 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"description": "The unique DB identifier of a tag\nThe unique DB identifier of a tag",
|
||||
"description": "The unique DB identifier of a tag",
|
||||
"type": "integer"
|
||||
},
|
||||
"name": {
|
||||
|
api/swagger.yaml (248 changed lines)
@ -1,5 +1,22 @@
|
||||
basePath: /api
|
||||
definitions:
|
||||
api.ApiReturnedUser:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
projects:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
roles:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
username:
|
||||
type: string
|
||||
type: object
|
||||
api.ApiTag:
|
||||
properties:
|
||||
name:
|
||||
@ -495,9 +512,7 @@ definitions:
|
||||
description: Defines a tag using name and type.
|
||||
properties:
|
||||
id:
|
||||
description: |-
|
||||
The unique DB identifier of a tag
|
||||
The unique DB identifier of a tag
|
||||
description: The unique DB identifier of a tag
|
||||
type: integer
|
||||
name:
|
||||
description: Tag Name
|
||||
@ -526,7 +541,7 @@ info:
|
||||
name: MIT License
|
||||
url: https://opensource.org/licenses/MIT
|
||||
title: ClusterCockpit REST API
|
||||
version: "1"
|
||||
version: 1.0.0
|
||||
paths:
|
||||
/jobs/:
|
||||
get:
|
||||
@ -996,6 +1011,231 @@ paths:
|
||||
summary: Adds one or more tags to a job
|
||||
tags:
|
||||
- add and modify
|
||||
/user/{id}:
|
||||
post:
|
||||
consumes:
|
||||
- multipart/form-data
|
||||
description: |-
|
||||
Modifies user defined by username (id) in one of four possible ways.
|
||||
If more than one formValue is set then only the highest priority field is used.
|
||||
parameters:
|
||||
- description: Database ID of User
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- description: 'Priority 1: Role to add'
|
||||
enum:
|
||||
- admin
|
||||
- support
|
||||
- manager
|
||||
- user
|
||||
- api
|
||||
in: formData
|
||||
name: add-role
|
||||
type: string
|
||||
- description: 'Priority 2: Role to remove'
|
||||
enum:
|
||||
- admin
|
||||
- support
|
||||
- manager
|
||||
- user
|
||||
- api
|
||||
in: formData
|
||||
name: remove-role
|
||||
type: string
|
||||
- description: 'Priority 3: Project to add'
|
||||
in: formData
|
||||
name: add-project
|
||||
type: string
|
||||
- description: 'Priority 4: Project to remove'
|
||||
in: formData
|
||||
name: remove-project
|
||||
type: string
|
||||
produces:
|
||||
- text/plain
|
||||
responses:
|
||||
"200":
|
||||
description: Success Response Message
|
||||
schema:
|
||||
type: string
|
||||
"400":
|
||||
description: Bad Request
|
||||
schema:
|
||||
type: string
|
||||
"401":
|
||||
description: Unauthorized
|
||||
schema:
|
||||
type: string
|
||||
"403":
|
||||
description: Forbidden
|
||||
schema:
|
||||
type: string
|
||||
"422":
|
||||
description: 'Unprocessable Entity: The user could not be updated'
|
||||
schema:
|
||||
type: string
|
||||
"500":
|
||||
description: Internal Server Error
|
||||
schema:
|
||||
type: string
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
summary: Updates an existing user
|
||||
tags:
|
||||
- add and modify
|
||||
/users/:
|
||||
delete:
|
||||
consumes:
|
||||
- multipart/form-data
|
||||
description: User defined by username in form data will be deleted from database.
|
||||
parameters:
|
||||
- description: User ID to delete
|
||||
in: formData
|
||||
name: username
|
||||
required: true
|
||||
type: string
|
||||
produces:
|
||||
- text/plain
|
||||
responses:
|
||||
"200":
|
||||
description: User deleted successfully
|
||||
"400":
|
||||
description: Bad Request
|
||||
schema:
|
||||
type: string
|
||||
"401":
|
||||
description: Unauthorized
|
||||
schema:
|
||||
type: string
|
||||
"403":
|
||||
description: Forbidden
|
||||
schema:
|
||||
type: string
|
||||
"422":
|
||||
description: 'Unprocessable Entity: deleting user failed'
|
||||
schema:
|
||||
type: string
|
||||
"500":
|
||||
description: Internal Server Error
|
||||
schema:
|
||||
type: string
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
summary: Deletes a user
|
||||
tags:
|
||||
- remove
|
||||
get:
|
||||
description: |-
|
||||
Returns a JSON-encoded list of users.
|
||||
Required query-parameter defines if all users or only users with additional special roles are returned.
|
||||
parameters:
|
||||
- description: If returned list should contain all users or only users with
|
||||
additional special roles
|
||||
in: query
|
||||
name: not-just-user
|
||||
required: true
|
||||
type: boolean
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: List of users returned successfully
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/definitions/api.ApiReturnedUser'
|
||||
type: array
|
||||
"400":
|
||||
description: Bad Request
|
||||
schema:
|
||||
type: string
|
||||
"401":
|
||||
description: Unauthorized
|
||||
schema:
|
||||
type: string
|
||||
"403":
|
||||
description: Forbidden
|
||||
schema:
|
||||
type: string
|
||||
"500":
|
||||
description: Internal Server Error
|
||||
schema:
|
||||
type: string
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
summary: Returns a list of users
|
||||
tags:
|
||||
- query
|
||||
post:
|
||||
consumes:
|
||||
- multipart/form-data
|
||||
description: User specified in form data will be saved to database.
|
||||
parameters:
|
||||
- description: Unique user ID
|
||||
in: formData
|
||||
name: username
|
||||
required: true
|
||||
type: string
|
||||
- description: User password
|
||||
in: formData
|
||||
name: password
|
||||
required: true
|
||||
type: string
|
||||
- description: User role
|
||||
enum:
|
||||
- admin
|
||||
- support
|
||||
- manager
|
||||
- user
|
||||
- api
|
||||
in: formData
|
||||
name: role
|
||||
required: true
|
||||
type: string
|
||||
- description: Managed project, required for new manager role user
|
||||
in: formData
|
||||
name: project
|
||||
type: string
|
||||
- description: Users name
|
||||
in: formData
|
||||
name: name
|
||||
type: string
|
||||
- description: Users email
|
||||
in: formData
|
||||
name: email
|
||||
type: string
|
||||
produces:
|
||||
- text/plain
|
||||
responses:
|
||||
"200":
|
||||
description: Success Response
|
||||
schema:
|
||||
type: string
|
||||
"400":
|
||||
description: Bad Request
|
||||
schema:
|
||||
type: string
|
||||
"401":
|
||||
description: Unauthorized
|
||||
schema:
|
||||
type: string
|
||||
"403":
|
||||
description: Forbidden
|
||||
schema:
|
||||
type: string
|
||||
"422":
|
||||
description: 'Unprocessable Entity: creating user failed'
|
||||
schema:
|
||||
type: string
|
||||
"500":
|
||||
description: Internal Server Error
|
||||
schema:
|
||||
type: string
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
summary: Adds a new user
|
||||
tags:
|
||||
- add and modify
|
||||
securityDefinitions:
|
||||
ApiKeyAuth:
|
||||
in: header
|
||||
|
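The `/users/` routes documented in the swagger files above can be called directly. A minimal sketch of listing users via the REST API, assuming the server runs on `localhost:8080` and a JWT for a user with the `api` role is at hand; the struct mirrors the `api.ApiReturnedUser` definition:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Mirrors the api.ApiReturnedUser definition from api/swagger.json.
type apiReturnedUser struct {
	Username string   `json:"username"`
	Name     string   `json:"name"`
	Email    string   `json:"email"`
	Projects []string `json:"projects"`
	Roles    []string `json:"roles"`
}

func main() {
	req, _ := http.NewRequest(http.MethodGet,
		"http://localhost:8080/api/users/?not-just-user=true", nil)
	req.Header.Set("Authorization", "Bearer <JWT>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var users []apiReturnedUser
	if err := json.NewDecoder(resp.Body).Decode(&users); err != nil {
		panic(err)
	}
	for _, u := range users {
		fmt.Println(u.Username, u.Roles)
	}
}
```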
@@ -211,10 +211,7 @@ func main() {
var authentication *auth.Authentication
if !config.Keys.DisableAuthentication {
  var err error
  if authentication, err = auth.Init(db.DB, map[string]interface{}{
    "ldap": config.Keys.LdapConfig,
    "jwt": config.Keys.JwtConfig,
  }); err != nil {
  if authentication, err = auth.Init(); err != nil {
    log.Fatalf("auth initialization failed: %v", err)
  }

@@ -228,14 +225,16 @@ func main() {
  log.Fatal("invalid argument format for user creation")
}

if err := authentication.AddUser(&auth.User{
ur := repository.GetUserRepository()
if err := ur.AddUser(&schema.User{
  Username: parts[0], Projects: make([]string, 0), Password: parts[2], Roles: strings.Split(parts[1], ","),
}); err != nil {
  log.Fatalf("adding '%s' user authentication failed: %v", parts[0], err)
}
}
if flagDelUser != "" {
  if err := authentication.DelUser(flagDelUser); err != nil {
  ur := repository.GetUserRepository()
  if err := ur.DelUser(flagDelUser); err != nil {
    log.Fatalf("deleting user failed: %v", err)
  }
}
@@ -252,12 +251,13 @@ func main() {
}

if flagGenJWT != "" {
  user, err := authentication.GetUser(flagGenJWT)
  ur := repository.GetUserRepository()
  user, err := ur.GetUser(flagGenJWT)
  if err != nil {
    log.Fatalf("could not get user from JWT: %v", err)
  }

  if !user.HasRole(auth.RoleApi) {
  if !user.HasRole(schema.RoleApi) {
    log.Warnf("user '%s' does not have the API role", user.Username)
  }

@@ -327,21 +327,19 @@ func main() {

r.HandleFunc("/login", func(rw http.ResponseWriter, r *http.Request) {
  rw.Header().Add("Content-Type", "text/html; charset=utf-8")
  web.RenderTemplate(rw, r, "login.tmpl", &web.Page{Title: "Login", Build: buildInfo})
  web.RenderTemplate(rw, "login.tmpl", &web.Page{Title: "Login", Build: buildInfo})
}).Methods(http.MethodGet)
r.HandleFunc("/imprint", func(rw http.ResponseWriter, r *http.Request) {
  rw.Header().Add("Content-Type", "text/html; charset=utf-8")
  web.RenderTemplate(rw, r, "imprint.tmpl", &web.Page{Title: "Imprint", Build: buildInfo})
  web.RenderTemplate(rw, "imprint.tmpl", &web.Page{Title: "Imprint", Build: buildInfo})
})
r.HandleFunc("/privacy", func(rw http.ResponseWriter, r *http.Request) {
  rw.Header().Add("Content-Type", "text/html; charset=utf-8")
  web.RenderTemplate(rw, r, "privacy.tmpl", &web.Page{Title: "Privacy", Build: buildInfo})
  web.RenderTemplate(rw, "privacy.tmpl", &web.Page{Title: "Privacy", Build: buildInfo})
})

// Some routes, such as /login or /query, should only be accessible to a user that is logged in.
// Those should be mounted to this subrouter. If authentication is enabled, a middleware will prevent
// any unauthenticated accesses.
secured := r.PathPrefix("/").Subrouter()

if !config.Keys.DisableAuthentication {
  r.Handle("/login", authentication.Login(
    // On success:
@@ -351,7 +349,7 @@ func main() {
    func(rw http.ResponseWriter, r *http.Request, err error) {
      rw.Header().Add("Content-Type", "text/html; charset=utf-8")
      rw.WriteHeader(http.StatusUnauthorized)
      web.RenderTemplate(rw, r, "login.tmpl", &web.Page{
      web.RenderTemplate(rw, "login.tmpl", &web.Page{
        Title: "Login failed - ClusterCockpit",
        MsgType: "alert-warning",
        Message: err.Error(),
@@ -359,16 +357,33 @@ func main() {
      })
    })).Methods(http.MethodPost)

  r.Handle("/logout", authentication.Logout(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
    rw.Header().Add("Content-Type", "text/html; charset=utf-8")
    rw.WriteHeader(http.StatusOK)
    web.RenderTemplate(rw, r, "login.tmpl", &web.Page{
      Title: "Bye - ClusterCockpit",
      MsgType: "alert-info",
      Message: "Logout successful",
      Build: buildInfo,
    })
  }))).Methods(http.MethodPost)
  r.Handle("/jwt-login", authentication.Login(
    // On success:
    http.RedirectHandler("/", http.StatusTemporaryRedirect),

    // On failure:
    func(rw http.ResponseWriter, r *http.Request, err error) {
      rw.Header().Add("Content-Type", "text/html; charset=utf-8")
      rw.WriteHeader(http.StatusUnauthorized)
      web.RenderTemplate(rw, "login.tmpl", &web.Page{
        Title: "Login failed - ClusterCockpit",
        MsgType: "alert-warning",
        Message: err.Error(),
        Build: buildInfo,
      })
    }))

  r.Handle("/logout", authentication.Logout(
    http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
      rw.Header().Add("Content-Type", "text/html; charset=utf-8")
      rw.WriteHeader(http.StatusOK)
      web.RenderTemplate(rw, "login.tmpl", &web.Page{
        Title: "Bye - ClusterCockpit",
        MsgType: "alert-info",
        Message: "Logout successful",
        Build: buildInfo,
      })
    }))).Methods(http.MethodPost)

  secured.Use(func(next http.Handler) http.Handler {
    return authentication.Auth(
@@ -378,7 +393,7 @@ func main() {
      // On failure:
      func(rw http.ResponseWriter, r *http.Request, err error) {
        rw.WriteHeader(http.StatusUnauthorized)
        web.RenderTemplate(rw, r, "login.tmpl", &web.Page{
        web.RenderTemplate(rw, "login.tmpl", &web.Page{
          Title: "Authentication failed - ClusterCockpit",
          MsgType: "alert-danger",
          Message: err.Error(),
@@ -16,25 +16,41 @@ It is supported to set these by means of a `.env` file in the project root.
* `static-files`: Type string. Folder where static assets can be found, if `embed-static-files` is `false`. No default.
* `db-driver`: Type string. 'sqlite3' or 'mysql' (mysql will work for mariadb as well). Default `sqlite3`.
* `db`: Type string. For sqlite3 a filename, for mysql a DSN in this format: https://github.com/go-sql-driver/mysql#dsn-data-source-name (Without query parameters!). Default: `./var/job.db`.
* `job-archive`: Type string. Path to the job-archive. Default: `./var/job-archive`.
* `job-archive`: Type object.
  - `kind`: Type string. At them moment only file is supported as value.
  - `path`: Type string. Path to the job-archive. Default: `./var/job-archive`.
  - `compression`: Type integer. Setup automatic compression for jobs older than number of days.
  - `retention`: Type object.
    - `policy`: Type string (required). Retention policy. Possible values none, delete, move.
    - `includeDB`: Type boolean. Also remove jobs from database.
    - `age`: Type integer. Act on jobs with startTime older than age (in days).
    - `location`: Type string. The target directory for retention. Only applicable for retention policy move.
* `disable-archive`: Type bool. Keep all metric data in the metric data repositories, do not write to the job-archive. Default `false`.
* `validate`: Type bool. Validate all input json documents against json schema.
* `session-max-age`: Type string. Specifies for how long a session shall be valid as a string parsable by time.ParseDuration(). If 0 or empty, the session/token does not expire! Default `168h`.
* `jwt-max-age`: Type string. Specifies for how long a JWT token shall be valid as a string parsable by time.ParseDuration(). If 0 or empty, the session/token does not expire! Default `0`.
* `https-cert-file` and `https-key-file`: Type string. If both those options are not empty, use HTTPS using those certificates.
* `redirect-http-to`: Type string. If not the empty string and `addr` does not end in ":80", redirect every request incoming at port 80 to that url.
* `machine-state-dir`: Type string. Where to store MachineState files. TODO: Explain in more detail!
* `stop-jobs-exceeding-walltime`: Type int. If not zero, automatically mark jobs as stopped running X seconds longer than their walltime. Only applies if walltime is set for job. Default `0`.
* `short-running-jobs-duration`: Type int. Do not show running jobs shorter than X seconds. Default `300`.
* `jwts`: Type object (required). For JWT Authentication.
  - `max-age`: Type string (required). Configure how long a token is valid. As string parsable by time.ParseDuration().
  - `cookieName`: Type string. Cookie that should be checked for a JWT token.
  - `vaidateUser`: Type boolean. Deny login for users not in database (but defined in JWT). Overwrite roles in JWT with database roles.
  - `trustedIssuer`: Type string. Issuer that should be accepted when validating external JWTs.
  - `syncUserOnLogin`: Type boolean. Add non-existent user to DB at login attempt with values provided in JWT.
* `ldap`: Type object. For LDAP Authentication and user synchronisation. Default `nil`.
  - `url`: Type string. URL of LDAP directory server.
  - `user_base`: Type string. Base DN of user tree root.
  - `search_dn`: Type string. DN for authenticating LDAP admin account with general read rights.
  - `user_bind`: Type string. Expression used to authenticate users via LDAP bind. Must contain `uid={username}`.
  - `user_filter`: Type string. Filter to extract users for syncing.
  - `url`: Type string (required). URL of LDAP directory server.
  - `user_base`: Type string (required). Base DN of user tree root.
  - `search_dn`: Type string (required). DN for authenticating LDAP admin account with general read rights.
  - `user_bind`: Type string (required). Expression used to authenticate users via LDAP bind. Must contain `uid={username}`.
  - `user_filter`: Type string (required). Filter to extract users for syncing.
  - `username_attr`: Type string. Attribute with full user name. Defaults to `gecos` if not provided.
  - `sync_interval`: Type string. Interval used for syncing local user table with LDAP directory. Parsed using time.ParseDuration.
  - `sync_del_old_users`: Type bool. Delete obsolete users in database.
* `clusters`: Type array of objects
  - `sync_del_old_users`: Type boolean. Delete obsolete users in database.
  - `syncUserOnLogin`: Type boolean. Add non-existent user to DB at login attempt if user exists in Ldap directory.
* `clusters`: Type array of objects (required)
  - `name`: Type string. The name of the cluster.
  - `metricDataRepository`: Type object with properties: `kind` (Type string, can be one of `cc-metric-store`, `influxdb` ), `url` (Type string), `token` (Type string)
  - `filterRanges` Type object. This option controls the slider ranges for the UI controls of numNodes, duration, and startTime. Example:
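Several of the duration options above (`session-max-age`, `jwts.max-age`, `sync_interval`) are strings parsed with `time.ParseDuration()`. A quick illustration of which value forms are accepted (illustrative, not taken from the repository):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	for _, v := range []string{"168h", "2m", "24h30m"} {
		d, err := time.ParseDuration(v)
		if err != nil {
			fmt.Println(v, "->", err)
			continue
		}
		fmt.Println(v, "->", d)
	}
	// "7d" would fail: ParseDuration only knows "ns", "us", "ms", "s", "m", "h".
}
```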
@@ -4,6 +4,9 @@
    "kind": "file",
    "path": "./var/job-archive"
  },
  "jwts": {
    "max-age": "2m"
  },
  "clusters": [
    {
      "name": "fritz",
@@ -5,7 +5,7 @@
    "user_base": "ou=people,ou=hpc,dc=test,dc=de",
    "search_dn": "cn=hpcmonitoring,ou=roadm,ou=profile,ou=hpc,dc=test,dc=de",
    "user_bind": "uid={username},ou=people,ou=hpc,dc=test,dc=de",
    "user_filter": "(&(objectclass=posixAccount)(uid=*))"
    "user_filter": "(&(objectclass=posixAccount))"
  },
  "https-cert-file": "/etc/letsencrypt/live/url/fullchain.pem",
  "https-key-file": "/etc/letsencrypt/live/url/privkey.pem",
@@ -42,9 +42,9 @@
  ],
  "jwts": {
    "cookieName": "",
    "forceJWTValidationViaDatabase": false,
    "max-age": 0,
    "trustedExternalIssuer": ""
    "validateUser": false,
    "max-age": "2m",
    "trustedIssuer": ""
  },
  "short-running-jobs-duration": 300
}
@@ -1,11 +1,13 @@
## Introduction

ClusterCockpit uses JSON Web Tokens (JWT) for authorization of its APIs.
JSON Web Token (JWT) is an open standard (RFC 7519) that defines a compact and self-contained way for securely transmitting information between parties as a JSON object.
This information can be verified and trusted because it is digitally signed.
In ClusterCockpit JWTs are signed using a public/private key pair using ECDSA.
Because tokens are signed using public/private key pairs, the signature also certifies that only the party holding the private key is the one that signed it.
Currently JWT tokens in ClusterCockpit not yet expire.
ClusterCockpit uses JSON Web Tokens (JWT) for authorization of its APIs. JSON
Web Token (JWT) is an open standard (RFC 7519) that defines a compact and
self-contained way for securely transmitting information between parties as a
JSON object. This information can be verified and trusted because it is
digitally signed. In ClusterCockpit JWTs are signed using a public/private key
pair using ECDSA. Because tokens are signed using public/private key pairs, the
signature also certifies that only the party holding the private key is the one
that signed it. Token expiration is set to the configuration option MaxAge.

## JWT Payload

@@ -25,8 +27,14 @@ $ ./gen-keypair
2. Add keypair in your `.env` file. A template can be found in `./configs`.

There are two usage scenarios:
* The APIs are used during a browser session. In this case on login a JWT token is issued on login, that is used by the web frontend to authorize against the GraphQL and REST APIs.
* The REST API is used outside a browser session, e.g. by scripts. In this case you have to issue a token manually. This possible from within the configuration view or on the command line. It is recommended to issue a JWT token in this case for a special user that only has the `api` role. By using different users for different purposes a fine grained access control and access revocation management is possible.
* The APIs are used during a browser session. API accesses are authorized with
  the active session.
* The REST API is used outside a browser session, e.g. by scripts. In this case
  you have to issue a token manually. This possible from within the
  configuration view or on the command line. It is recommended to issue a JWT
  token in this case for a special user that only has the `api` role. By using
  different users for different purposes a fine grained access control and
  access revocation management is possible.

The token is commonly specified in the Authorization HTTP header using the Bearer schema.

@@ -46,16 +54,24 @@ $ curl -X GET "<API ENDPOINT>" -H "accept: application/json" -H "Content-Type:
```

## Accept externally generated JWTs provided via cookie
If there is an external service like an AuthAPI that can generate JWTs and hand them over to ClusterCockpit via cookies, CC can be configured to accept them:
If there is an external service like an AuthAPI that can generate JWTs and hand
them over to ClusterCockpit via cookies, CC can be configured to accept them:

1. `.env`: CC needs a public ed25519 key to verify foreign JWT signatures. Public keys in PEM format can be converted with the instructions in [/tools/convert-pem-pubkey-for-cc](../tools/convert-pem-pubkey-for-cc/Readme.md) .
1. `.env`: CC needs a public ed25519 key to verify foreign JWT signatures.
   Public keys in PEM format can be converted with the instructions in
   [/tools/convert-pem-pubkey-for-cc](../tools/convert-pem-pubkey-for-cc/Readme.md)
   .

```
CROSS_LOGIN_JWT_PUBLIC_KEY="+51iXX8BdLFocrppRxIw52xCOf8xFSH/eNilN5IHVGc="
```

2. `config.json`: Insert a name for the cookie (set by the external service) containing the JWT so that CC knows where to look at. Define a trusted issuer (JWT claim 'iss'), otherwise it will be rejected.
If you want usernames and user roles from JWTs ('sub' and 'roles' claim) to be validated against CC's internal database, you need to enable it here. Unknown users will then be rejected and roles set via JWT will be ignored.
2. `config.json`: Insert a name for the cookie (set by the external service)
   containing the JWT so that CC knows where to look at. Define a trusted issuer
   (JWT claim 'iss'), otherwise it will be rejected. If you want usernames and
   user roles from JWTs ('sub' and 'roles' claim) to be validated against CC's
   internal database, you need to enable it here. Unknown users will then be
   rejected and roles set via JWT will be ignored.

```json
"jwts": {
@@ -65,7 +81,8 @@ If you want usernames and user roles from JWTs ('sub' and 'roles' claim) to be v
}
```

3. Make sure your external service includes the same issuer (`iss`) in its JWTs. Example JWT payload:
3. Make sure your external service includes the same issuer (`iss`) in its JWTs.
   Example JWT payload:

```json
{
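To make the token workflow tangible, a minimal sketch of issuing such a token, assuming an Ed25519 key pair (as produced by `gen-keypair`) and the widely used github.com/golang-jwt/jwt library; the exact signing code in cc-backend may differ:

```go
package main

import (
	"crypto/ed25519"
	"crypto/rand"
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	// In cc-backend the key pair would come from the .env file; here we
	// generate a throwaway one for illustration.
	_, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}

	// Claims as described in the authentication docs: sub, exp, roles.
	tok := jwt.NewWithClaims(jwt.SigningMethodEdDSA, jwt.MapClaims{
		"sub":   "demo",
		"roles": []string{"api"},
		"exp":   time.Now().Add(2 * time.Minute).Unix(),
	})

	signed, err := tok.SignedString(priv)
	if err != nil {
		panic(err)
	}
	fmt.Println(signed) // use as: Authorization: Bearer <token>
}
```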
@@ -1,19 +1,24 @@
# Overview

The implementation of authentication is not easy to understand by just looking
at the code. The authentication is implemented in `internal/auth/`. In `auth.go`
The authentication is implemented in `internal/auth/`. In `auth.go`
an interface is defined that any authentication provider must fulfill. It also
acts as a dispatcher to delegate the calls to the available authentication
providers.

The most important routine are:
* `CanLogin()` Check if the authentication method is supported for login attempt
Two authentication types are available:
* JWT authentication for the REST API that does not create a session cookie
* Session based authentication using a session cookie

The most important routines in auth are:
* `Login()` Handle POST request to login user and start a new session
* `Auth()` Authenticate user and put User Object in context of the request

The http router calls auth in the following cases:
* `r.Handle("/login", authentication.Login( ... )).Methods(http.MethodPost)`:
  The POST request on the `/login` route will call the Login callback.
* `r.Handle("/jwt-login", authentication.Login( ... ))`:
  Any request on the `/jwt-login` route will call the Login callback. Intended
  for use for the JWT token based authenticators.
* Any route in the secured subrouter will always call Auth(), on success it will
  call the next handler in the chain, on failure it will render the login
  template.
@@ -30,10 +35,9 @@ secured.Use(func(next http.Handler) http.Handler {
})
```

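The fragment above shows how the secured subrouter delegates to `Auth()`. A self-contained sketch of that pattern with gorilla/mux, where the handler names and the authentication check are placeholders rather than the cc-backend implementation:

```go
package main

import (
	"net/http"

	"github.com/gorilla/mux"
)

// authenticate is a stand-in for authentication.Auth(): it decides whether
// the request carries a valid session or JWT.
func authenticate(r *http.Request) bool { return r.Header.Get("Authorization") != "" }

func main() {
	r := mux.NewRouter()
	secured := r.PathPrefix("/").Subrouter()

	secured.Use(func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
			if !authenticate(req) {
				// On failure cc-backend renders the login template instead.
				http.Error(rw, "authentication failed", http.StatusUnauthorized)
				return
			}
			next.ServeHTTP(rw, req) // on success, call the next handler in the chain
		})
	})

	secured.HandleFunc("/query", func(rw http.ResponseWriter, req *http.Request) {
		rw.Write([]byte("authenticated\n"))
	})

	http.ListenAndServe(":8080", r)
}
```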
For non API routes a JWT token can be used to initiate an authenticated user
A JWT token can be used to initiate an authenticated user
session. This can either happen by calling the login route with a token
provided in a header or query URL or via the `Auth()` method on first access
to a secured URL via a special cookie containing the JWT token.
provided in a header or via a special cookie containing the JWT token.
For API routes the access is authenticated on every request using the JWT token
and no session is initiated.

@@ -43,9 +47,9 @@ The Login function (located in `auth.go`):
* Extracts the user name and gets the user from the user database table. In case the
  user is not found the user object is set to nil.
* Iterates over all authenticators and:
  - Calls the `CanLogin` function which checks if the authentication method is
    supported for this user and the user object is valid.
  - Calls the `Login` function to authenticate the user. On success a valid user
  - Calls its `CanLogin` function which checks if the authentication method is
    supported for this user.
  - Calls its `Login` function to authenticate the user. On success a valid user
    object is returned.
  - Creates a new session object, stores the user attributes in the session and
    saves the session.
@@ -63,94 +67,114 @@ the user database table:
```
if e := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(r.FormValue("password"))); e != nil {
    log.Errorf("AUTH/LOCAL > Authentication for user %s failed!", user.Username)
    return nil, fmt.Errorf("AUTH/LOCAL > Authentication failed")
    return nil, fmt.Errorf("Authentication failed")
}
```

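For reference, the hash that `CompareHashAndPassword` checks against would have been produced with bcrypt when the user was created. A small sketch using golang.org/x/crypto/bcrypt (illustrative; not the exact cc-backend code path):

```go
package main

import (
	"fmt"

	"golang.org/x/crypto/bcrypt"
)

func main() {
	// Hash a password at user creation time.
	hash, err := bcrypt.GenerateFromPassword([]byte("secret"), bcrypt.DefaultCost)
	if err != nil {
		panic(err)
	}

	// Later, at login, compare the stored hash with the submitted password.
	if err := bcrypt.CompareHashAndPassword(hash, []byte("secret")); err != nil {
		fmt.Println("authentication failed")
		return
	}
	fmt.Println("authentication succeeded")
}
```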
## LDAP authenticator

This authenticator is applied if
This authenticator is applied if the user was found in the database and its
AuthSource is LDAP:
```
return user != nil && user.AuthSource == AuthViaLDAP
if user != nil {
    if user.AuthSource == schema.AuthViaLDAP {
        return user, true
    }
}
```

If the option `SyncUserOnLogin` is set it tried to sync the user from the LDAP
directory. In case this succeeds the user is persisted to the database and can
login.

Gets the LDAP connection and tries a bind with the provided credentials:
```
if err := l.Bind(userDn, r.FormValue("password")); err != nil {
    log.Errorf("AUTH/LOCAL > Authentication for user %s failed: %v", user.Username, err)
    return nil, fmt.Errorf("AUTH/LDAP > Authentication failed")
    log.Errorf("AUTH/LDAP > Authentication for user %s failed: %v", user.Username, err)
    return nil, fmt.Errorf("Authentication failed")
}
```

## JWT authenticator
## JWT Session authenticator

Login via JWT token will create a session without password.
For login the `X-Auth-Token` header is not supported.
This authenticator is applied if either user is not nil and auth source is
`AuthViaToken` or the Authorization header is present or the URL query key
login-token is present:
For login the `X-Auth-Token` header is not supported. This authenticator is
applied if the Authorization header or query parameter login-token is present:
```
return (user != nil && user.AuthSource == AuthViaToken) ||
    r.Header.Get("Authorization") != "" ||
    r.URL.Query().Get("login-token") != ""
return user, r.Header.Get("Authorization") != "" ||
    r.URL.Query().Get("login-token") != ""
```

The Login function:
* Parses the token
* Parses the token and checks if it is expired
* Check if the signing method is EdDSA or HS256 or HS512
* Check if claims are valid and extracts the claims
* The following claims have to be present:
  - `sub`: The subject, in this case this is the username
  - `exp`: Expiration in Unix epoch time
  - `roles`: String array with roles of user
* In case user is not yet set, which is usually the case:
  - Try to fetch user from database
  - In case user is not yet present add user to user database table with `AuthViaToken` AuthSource.
* In case user does not exist in the database and the option `SyncUserOnLogin`
  is set add user to user database table with `AuthViaToken` AuthSource.
* Return valid user object

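As an illustration of the claim handling described above, a minimal parse-and-validate sketch with github.com/golang-jwt/jwt (an assumption about the library; cc-backend's own Parse code is not shown here):

```go
package jwtexample

import (
	"crypto/ed25519"
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

// validateToken checks the signing method, expiry and the required claims
// (sub, exp, roles) of a token string.
func validateToken(tokenString string, pub ed25519.PublicKey) (string, []interface{}, error) {
	tok, err := jwt.Parse(tokenString, func(t *jwt.Token) (interface{}, error) {
		if _, ok := t.Method.(*jwt.SigningMethodEd25519); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
		}
		return pub, nil
	})
	if err != nil { // Parse also fails on an expired exp claim
		return "", nil, err
	}

	claims := tok.Claims.(jwt.MapClaims)
	sub, _ := claims["sub"].(string)
	roles, _ := claims["roles"].([]interface{})
	if sub == "" || roles == nil {
		return "", nil, fmt.Errorf("missing sub or roles claim")
	}
	return sub, roles, nil
}
```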
## JWT Cookie Session authenticator

Login via JWT cookie token will create a session without password.
It is first checked if the required configuration options are set:
* `trustedIssuer`
* `CookieName`

and optionally the environment variable `CROSS_LOGIN_JWT_PUBLIC_KEY` is set.

This authenticator is applied if the configured cookie is present:
```
jwtCookie, err := r.Cookie(cookieName)

if err == nil && jwtCookie.Value != "" {
    return true
}
```

The Login function:
* Extracts and parses the token
* Checks if signing method is Ed25519/EdDSA
* In case publicKeyCrossLogin is configured:
  - Check if `iss` issuer claim matched trusted issuer from configuration
  - Return public cross login key
  - Otherwise return standard public key
* Check if claims are valid
* Depending on the option `validateUser` the roles are
  extracted from JWT token or taken from user object fetched from database
* Ask browser to delete the JWT cookie
* In case user does not exist in the database and the option `SyncUserOnLogin`
  is set add user to user database table with `AuthViaToken` AuthSource.
* Return valid user object

# Auth

The Auth function (located in `auth.go`):
* Returns a new http handler function that is defined right away
* This handler iterates over all authenticators
* Calls `Auth()` on every authenticator
* This handler tries two methods to authenticate a user:
  - Via a JWT API token in `AuthViaJWT()`
  - Via a valid session in `AuthViaSession()`
* If err is not nil and the user object is valid it puts the user object in the
  request context and starts the onSuccess http handler
* Otherwise it calls the onFailure handler

## Local
## AuthViaJWT

Calls the `AuthViaSession()` function in `auth.go`. This will extract username,
projects and roles from the session and initialize a user object with those
values.
Implemented in JWTAuthenticator:
* Extract token either from header `X-Auth-Token` or `Authorization` with Bearer
  prefix
* Parse token and check if it is valid. The Parse routine will also check if the
  token is expired.
* If the option `validateUser` is set it will ensure the
  user object exists in the database and takes the roles from the database user
* Otherwise the roles are extracted from the roles claim
* Returns a valid user object with AuthType set to AuthToken

## LDAP

Calls the `AuthViaSession()` function in `auth.go`. This will extract username,
projects and roles from the session and initialize a user object with those
values.

# JWT

Check for JWT token:
* Is token passed in the `X-Auth-Token` or `Authorization` header
* If no token is found in a header it tries to read the token from a configured
  cookie.

Finally it calls AuthViaSession in `auth.go` if a valid session exists. This is
true if a JWT token was previously used to initiate a session. In this case the
user object initialized with the session is returned right away.

In case a token was found extract and parse the token:
* Check if signing method is Ed25519/EdDSA
* In case publicKeyCrossLogin is configured:
  - Check if `iss` issuer claim matched trusted issuer from configuration
  - Return public cross login key
  - Otherwise return standard public key
* Check if claims are valid
* Depending on the option `ForceJWTValidationViaDatabase ` the roles are
  extracted from JWT token or taken from user object fetched from database
* In case the token was extracted from cookie create a new session and ask the
  browser to delete the JWT cookie
* Return valid user object
## AuthViaSession

* Extracts session
* Get values username, projects, and roles from session
* Returns a valid user object with AuthType set to AuthSession
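The "puts the user object in the request context" step above is the standard context.WithValue pattern. A compact sketch with a simplified user type (cc-backend's real type is `schema.User`; the key name and fields here are illustrative):

```go
package main

import (
	"context"
	"fmt"
	"net/http"
)

type user struct {
	Username string
	Roles    []string
}

type contextKey string

const userContextKey contextKey = "user"

// onSuccess-style handler: reads the authenticated user back out of the context.
func handler(rw http.ResponseWriter, r *http.Request) {
	if u, ok := r.Context().Value(userContextKey).(*user); ok {
		fmt.Fprintf(rw, "hello %s (roles %v)\n", u.Username, u.Roles)
	}
}

// auth middleware: stores the user in the request context before calling next.
func withUser(next http.HandlerFunc) http.HandlerFunc {
	return func(rw http.ResponseWriter, r *http.Request) {
		u := &user{Username: "demo", Roles: []string{"api"}}
		next(rw, r.WithContext(context.WithValue(r.Context(), userContextKey, u)))
	}
}

func main() {
	http.HandleFunc("/", withUser(handler))
	http.ListenAndServe(":8080", nil)
}
```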
go.mod (32 changed lines)

@@ -3,7 +3,7 @@ module github.com/ClusterCockpit/cc-backend
go 1.18

require (
    github.com/99designs/gqlgen v0.17.24
    github.com/99designs/gqlgen v0.17.36
    github.com/ClusterCockpit/cc-units v0.4.0
    github.com/Masterminds/squirrel v1.5.3
    github.com/go-co-op/gocron v1.25.0
@@ -23,9 +23,9 @@ require (
    github.com/qustavo/sqlhooks/v2 v2.1.0
    github.com/santhosh-tekuri/jsonschema/v5 v5.2.0
    github.com/swaggo/http-swagger v1.3.3
    github.com/swaggo/swag v1.8.10
    github.com/vektah/gqlparser/v2 v2.5.1
    golang.org/x/crypto v0.6.0
    github.com/swaggo/swag v1.16.1
    github.com/vektah/gqlparser/v2 v2.5.8
    golang.org/x/crypto v0.12.0
    golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea
)

@@ -40,19 +40,18 @@ require (
    github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
    github.com/deepmap/oapi-codegen v1.12.4 // indirect
    github.com/felixge/httpsnoop v1.0.3 // indirect
    github.com/ghodss/yaml v1.0.0 // indirect
    github.com/go-asn1-ber/asn1-ber v1.5.4 // indirect
    github.com/go-openapi/jsonpointer v0.19.6 // indirect
    github.com/go-openapi/jsonpointer v0.20.0 // indirect
    github.com/go-openapi/jsonreference v0.20.2 // indirect
    github.com/go-openapi/spec v0.20.8 // indirect
    github.com/go-openapi/swag v0.22.3 // indirect
    github.com/go-openapi/spec v0.20.9 // indirect
    github.com/go-openapi/swag v0.22.4 // indirect
    github.com/golang/protobuf v1.5.2 // indirect
    github.com/google/uuid v1.3.0 // indirect
    github.com/gorilla/securecookie v1.1.1 // indirect
    github.com/gorilla/websocket v1.5.0 // indirect
    github.com/hashicorp/errwrap v1.1.0 // indirect
    github.com/hashicorp/go-multierror v1.1.1 // indirect
    github.com/hashicorp/golang-lru v0.5.4 // indirect
    github.com/hashicorp/golang-lru/v2 v2.0.3 // indirect
    github.com/influxdata/line-protocol v0.0.0-20210922203350-b1ad95c89adf // indirect
    github.com/josharian/intern v1.0.0 // indirect
    github.com/jpillora/backoff v1.0.0 // indirect
@@ -76,17 +75,18 @@ require (
    github.com/robfig/cron/v3 v3.0.1 // indirect
    github.com/russross/blackfriday/v2 v2.1.0 // indirect
    github.com/swaggo/files v1.0.0 // indirect
    github.com/urfave/cli/v2 v2.24.4 // indirect
    github.com/urfave/cli/v2 v2.25.7 // indirect
    github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
    go.uber.org/atomic v1.10.0 // indirect
    golang.org/x/mod v0.8.0 // indirect
    golang.org/x/net v0.7.0 // indirect
    golang.org/x/mod v0.12.0 // indirect
    golang.org/x/net v0.14.0 // indirect
    golang.org/x/oauth2 v0.5.0 // indirect
    golang.org/x/sys v0.5.0 // indirect
    golang.org/x/text v0.7.0 // indirect
    golang.org/x/tools v0.6.0 // indirect
    golang.org/x/sys v0.11.0 // indirect
    golang.org/x/text v0.12.0 // indirect
    golang.org/x/tools v0.12.0 // indirect
    google.golang.org/appengine v1.6.7 // indirect
    google.golang.org/protobuf v1.28.1 // indirect
    google.golang.org/protobuf v1.30.0 // indirect
    gopkg.in/yaml.v2 v2.4.0 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
    sigs.k8s.io/yaml v1.3.0 // indirect
)
go.sum (88 changed lines)
@ -50,8 +50,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
|
||||
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
|
||||
github.com/99designs/gqlgen v0.17.24 h1:pcd/HFIoSdRvyADYQG2dHvQN2KZqX/nXzlVm6TMMq7E=
|
||||
github.com/99designs/gqlgen v0.17.24/go.mod h1:BMhYIhe4bp7OlCo5I2PnowSK/Wimpv/YlxfNkqZGwLo=
|
||||
github.com/99designs/gqlgen v0.17.36 h1:u/o/rv2SZ9s5280dyUOOrkpIIkr/7kITMXYD3rkJ9go=
|
||||
github.com/99designs/gqlgen v0.17.36/go.mod h1:6RdyY8puhCoWAQVr2qzF2OMVfudQzc8ACxzpzluoQm4=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg=
|
||||
github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k=
|
||||
github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
||||
@ -78,7 +78,6 @@ github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e/go.mod h1:chxPXzS
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
|
||||
github.com/ClusterCockpit/cc-units v0.4.0 h1:zP5DOu99GmErW0tCDf0gcLrlWt42RQ9dpoONEOh4cI0=
|
||||
@ -120,7 +119,6 @@ github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdko
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
|
||||
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
|
||||
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
|
||||
github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
|
||||
github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
|
||||
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
|
||||
@ -355,7 +353,6 @@ github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfc
|
||||
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
|
||||
@ -442,7 +439,6 @@ github.com/gabriel-vasile/mimetype v1.4.0/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmx
|
||||
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
|
||||
github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ=
|
||||
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.4 h1:vXT6d/FNDiELJnLb6hGNa309LMsrCoYFvpwHDF0+Y1A=
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.4/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
|
||||
@ -478,8 +474,9 @@ github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+
|
||||
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
|
||||
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||
github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
|
||||
github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
|
||||
github.com/go-openapi/jsonpointer v0.20.0 h1:ESKJdU9ASRfaPNOPRx12IUyA1vn3R9GiE3KYD14BXdQ=
|
||||
github.com/go-openapi/jsonpointer v0.20.0/go.mod h1:6PGzBjjIIumbLYysB73Klnms1mwnU4G3YHOECG3CedA=
|
||||
github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
|
||||
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
|
||||
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
|
||||
@ -489,15 +486,16 @@ github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2Kv
|
||||
github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
|
||||
github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
|
||||
github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=
|
||||
github.com/go-openapi/spec v0.20.8 h1:ubHmXNY3FCIOinT8RNrrPfGc9t7I1qhPtdOGoG2AxRU=
|
||||
github.com/go-openapi/spec v0.20.8/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
|
||||
github.com/go-openapi/spec v0.20.9 h1:xnlYNQAwKd2VQRRfwTEI0DcK+2cbuvI/0c7jx3gA8/8=
|
||||
github.com/go-openapi/spec v0.20.9/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
|
||||
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
|
||||
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||
github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
|
||||
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
|
||||
github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
|
||||
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
|
||||
github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU=
|
||||
github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
|
||||
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
||||
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
@ -704,8 +702,8 @@ github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b
|
||||
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
|
||||
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.3 h1:kmRrRLlInXvng0SmLxmQpQkpbYAvcXm7NPDrgxJa9mE=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.3/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
|
||||
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
|
||||
@ -810,7 +808,6 @@ github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALr
|
||||
github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
|
||||
github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM=
|
||||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
@ -831,7 +828,7 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
|
||||
@ -851,7 +848,6 @@ github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
|
||||
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo=
|
||||
github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
|
||||
github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=
|
||||
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||
@ -866,12 +862,10 @@ github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsI
|
||||
github.com/markbates/pkger v0.15.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI=
|
||||
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
|
||||
github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
|
||||
github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
|
||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
|
||||
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
@ -880,7 +874,6 @@ github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
|
||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
|
||||
github.com/mattn/go-shellwords v1.0.6/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
|
||||
@ -1100,8 +1093,8 @@ github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
|
||||
github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
|
||||
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
|
||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
||||
github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
@ -1160,14 +1153,14 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/swaggo/files v1.0.0 h1:1gGXVIeUFCS/dta17rnP0iOpr6CXFwKD7EO5ID233e4=
|
||||
github.com/swaggo/files v1.0.0/go.mod h1:N59U6URJLyU1PQgFqPM7wXLMhJx7QAolnvfQkqO13kc=
|
||||
github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCGpHsc=
|
||||
github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo=
|
||||
github.com/swaggo/swag v1.8.10 h1:eExW4bFa52WOjqRzRD58bgWsWfdFJso50lpbeTcmTfo=
|
||||
github.com/swaggo/swag v1.8.10/go.mod h1:ezQVUUhly8dludpVk+/PuwJWvLLanB13ygV5Pr9enSk=
|
||||
github.com/swaggo/swag v1.16.1 h1:fTNRhKstPKxcnoKsytm4sahr8FaYzUcT7i1/3nd/fBg=
|
||||
github.com/swaggo/swag v1.16.1/go.mod h1:9/LMvHycG3NFHfR6LwvikHv5iFvmPADQ359cKikGxto=
|
||||
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
@ -1182,12 +1175,11 @@ github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/
|
||||
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
|
||||
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/urfave/cli/v2 v2.8.1/go.mod h1:Z41J9TPoffeoqP0Iza0YbAhGvymRdZAd2uPmZ5JxRdY=
|
||||
github.com/urfave/cli/v2 v2.24.4 h1:0gyJJEBYtCV87zI/x2nZCPyDxD51K6xM8SkwjHFCNEU=
|
||||
github.com/urfave/cli/v2 v2.24.4/go.mod h1:GHupkWPMM0M/sj1a2b4wUrWBPzazNrIjouW6fmdJLxc=
|
||||
github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs=
|
||||
github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/vektah/gqlparser/v2 v2.5.1 h1:ZGu+bquAY23jsxDRcYpWjttRZrUz07LbiY77gUOHcr4=
|
||||
github.com/vektah/gqlparser/v2 v2.5.1/go.mod h1:mPgqFBu/woKTVYWyNk8cO3kh4S/f4aRFZrvOnp3hmCs=
|
||||
github.com/vektah/gqlparser/v2 v2.5.8 h1:pm6WOnGdzFOCfcQo9L3+xzW51mKrlwTEg4Wr7AH1JW4=
|
||||
github.com/vektah/gqlparser/v2 v2.5.8/go.mod h1:z8xXUff237NntSuH8mLFijZ+1tjV1swDbpDqjJmk6ME=
|
||||
github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=
|
||||
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
|
||||
github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=
|
||||
@ -1215,7 +1207,6 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
|
||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
|
||||
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
|
||||
@ -1304,8 +1295,8 @@ golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm
|
||||
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk=
|
||||
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
@ -1355,10 +1346,9 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@ -1421,7 +1411,6 @@ golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qx
|
||||
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
@ -1429,8 +1418,8 @@ golang.org/x/net v0.0.0-20220111093109-d55c255bac03/go.mod h1:9nx3DQGgdP8bBQD5qx
|
||||
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
|
||||
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
|
||||
golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@ -1464,7 +1453,7 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
|
||||
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
|
||||
golang.org/x/sys v0.0.0-20180224232135-f6cff0780e54/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@ -1578,7 +1567,6 @@ golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20210903071746-97244b99971b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@ -1588,10 +1576,9 @@ golang.org/x/sys v0.0.0-20220111092808-5a964db01320/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220317061510-51cd9980dadf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
@ -1607,10 +1594,9 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc=
|
||||
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@ -1698,10 +1684,9 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.12.0 h1:YW6HUoUmYBpwSgyaGaZq1fHjrBjX1rlpZ54T6mu2kss=
|
||||
golang.org/x/tools v0.12.0/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM=
|
||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@ -1876,9 +1861,8 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
|
||||
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
|
||||
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
@ -2013,3 +1997,5 @@ sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4=
|
||||
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
|
||||
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
|
||||
sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
|
||||
sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
|
||||
|
@ -39,6 +39,9 @@ func setup(t *testing.T) *api.RestApi {
		"kind": "file",
		"path": "./var/job-archive"
	},
	"jwts": {
		"max-age": "2m"
	},
	"clusters": [
	{
		"name": "testcluster",
|
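A small aside on the `jwts` block added to the test configuration above: the `max-age` value looks like a Go duration string. The following standalone sketch is an assumption about how such a value would translate into a token lifetime, not code taken from this commit:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumption: "max-age" is parsed as a Go duration string such as "2m" or "168h".
	maxAge, err := time.ParseDuration("2m")
	if err != nil {
		panic(err)
	}
	// A token issued now would stop validating two minutes later.
	fmt.Println("token valid until:", time.Now().Add(maxAge).Format(time.RFC3339))
}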
@ -713,9 +713,367 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/user/{id}": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Modifies user defined by username (id) in one of four possible ways.\nIf more than one formValue is set then only the highest priority field is used.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"add and modify"
|
||||
],
|
||||
"summary": "Updates an existing user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Database ID of User",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "Priority 1: Role to add",
|
||||
"name": "add-role",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "Priority 2: Role to remove",
|
||||
"name": "remove-role",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Priority 3: Project to add",
|
||||
"name": "add-project",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Priority 4: Project to remove",
|
||||
"name": "remove-project",
|
||||
"in": "formData"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Success Response Message",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: The user could not be updated",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/users/": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Returns a JSON-encoded list of users.\nRequired query-parameter defines if all users or only users with additional special roles are returned.",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"query"
|
||||
],
|
||||
"summary": "Returns a list of users",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"description": "If returned list should contain all users or only users with additional special roles",
|
||||
"name": "not-just-user",
|
||||
"in": "query",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of users returned successfully",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/api.ApiReturnedUser"
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "User specified in form data will be saved to database.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"add and modify"
|
||||
],
|
||||
"summary": "Adds a new user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Unique user ID",
|
||||
"name": "username",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User password",
|
||||
"name": "password",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"admin",
|
||||
"support",
|
||||
"manager",
|
||||
"user",
|
||||
"api"
|
||||
],
|
||||
"type": "string",
|
||||
"description": "User role",
|
||||
"name": "role",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Managed project, required for new manager role user",
|
||||
"name": "project",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Users name",
|
||||
"name": "name",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Users email",
|
||||
"name": "email",
|
||||
"in": "formData"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Success Response",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: creating user failed",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "User defined by username in form data will be deleted from database.",
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"text/plain"
|
||||
],
|
||||
"tags": [
|
||||
"remove"
|
||||
],
|
||||
"summary": "Deletes a user",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID to delete",
|
||||
"name": "username",
|
||||
"in": "formData",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "User deleted successfully"
|
||||
},
|
||||
"400": {
|
||||
"description": "Bad Request",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"description": "Unauthorized",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Unprocessable Entity: deleting user failed",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"api.ApiReturnedUser": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"projects": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"roles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"api.ApiTag": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@ -1372,7 +1730,7 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"description": "The unique DB identifier of a tag\nThe unique DB identifier of a tag",
|
||||
"description": "The unique DB identifier of a tag",
|
||||
"type": "integer"
|
||||
},
|
||||
"name": {
|
||||
@ -1415,7 +1773,7 @@ const docTemplate = `{
|
||||
|
||||
// SwaggerInfo holds exported Swagger Info so clients can modify it
|
||||
var SwaggerInfo = &swag.Spec{
|
||||
Version: "1",
|
||||
Version: "1.0.0",
|
||||
Host: "localhost:8080",
|
||||
BasePath: "/api",
|
||||
Schemes: []string{},
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
@ -20,11 +20,13 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/config"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/importer"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/metricdata"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/repository"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/util"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/archive"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
@ -76,6 +78,11 @@ func (api *RestApi) MountRoutes(r *mux.Router) {
|
||||
r.HandleFunc("/jobs/delete_job/{id}", api.deleteJobById).Methods(http.MethodDelete)
|
||||
r.HandleFunc("/jobs/delete_job_before/{ts}", api.deleteJobBefore).Methods(http.MethodDelete)
|
||||
|
||||
if api.MachineStateDir != "" {
|
||||
r.HandleFunc("/machine_state/{cluster}/{host}", api.getMachineState).Methods(http.MethodGet)
|
||||
r.HandleFunc("/machine_state/{cluster}/{host}", api.putMachineState).Methods(http.MethodPut, http.MethodPost)
|
||||
}
|
||||
|
||||
if api.Authentication != nil {
|
||||
r.HandleFunc("/jwt/", api.getJWT).Methods(http.MethodGet)
|
||||
r.HandleFunc("/roles/", api.getRoles).Methods(http.MethodGet)
|
||||
@ -85,11 +92,6 @@ func (api *RestApi) MountRoutes(r *mux.Router) {
|
||||
r.HandleFunc("/user/{id}", api.updateUser).Methods(http.MethodPost)
|
||||
r.HandleFunc("/configuration/", api.updateConfiguration).Methods(http.MethodPost)
|
||||
}
|
||||
|
||||
if api.MachineStateDir != "" {
|
||||
r.HandleFunc("/machine_state/{cluster}/{host}", api.getMachineState).Methods(http.MethodGet)
|
||||
r.HandleFunc("/machine_state/{cluster}/{host}", api.putMachineState).Methods(http.MethodPut, http.MethodPost)
|
||||
}
|
||||
}
|
||||
|
||||
// StartJobApiResponse model
|
||||
@ -103,6 +105,11 @@ type DeleteJobApiResponse struct {
|
||||
Message string `json:"msg"`
|
||||
}
|
||||
|
||||
// UpdateUserApiResponse model
|
||||
type UpdateUserApiResponse struct {
|
||||
Message string `json:"msg"`
|
||||
}
|
||||
|
||||
// StopJobApiRequest model
|
||||
type StopJobApiRequest struct {
|
||||
// Stop Time of job as epoch
|
||||
@ -156,6 +163,14 @@ type JobMetricWithName struct {
|
||||
Metric *schema.JobMetric `json:"metric"`
|
||||
}
|
||||
|
||||
type ApiReturnedUser struct {
|
||||
Username string `json:"username"`
|
||||
Name string `json:"name"`
|
||||
Roles []string `json:"roles"`
|
||||
Email string `json:"email"`
|
||||
Projects []string `json:"projects"`
|
||||
}
|
||||
|
||||
func handleError(err error, statusCode int, rw http.ResponseWriter) {
|
||||
log.Warnf("REST ERROR : %s", err.Error())
|
||||
rw.Header().Add("Content-Type", "application/json")
|
||||
@ -172,6 +187,40 @@ func decode(r io.Reader, val interface{}) error {
|
||||
return dec.Decode(val)
|
||||
}
|
||||
|
||||
func securedCheck(r *http.Request) error {
	user := repository.GetUserFromContext(r.Context())
	if user == nil {
		return fmt.Errorf("no user in context")
	}

	if user.AuthType == schema.AuthToken {
		// If nothing declared in config: deny all requests to this endpoint
		if config.Keys.ApiAllowedIPs == nil || len(config.Keys.ApiAllowedIPs) == 0 {
			return fmt.Errorf("missing configuration key ApiAllowedIPs")
		}

		if config.Keys.ApiAllowedIPs[0] == "*" {
			return nil
		}

		// extract IP address
		IPAddress := r.Header.Get("X-Real-Ip")
		if IPAddress == "" {
			IPAddress = r.Header.Get("X-Forwarded-For")
		}
		if IPAddress == "" {
			IPAddress = r.RemoteAddr
		}

		// check if IP is allowed
		if !util.Contains(config.Keys.ApiAllowedIPs, IPAddress) {
			return fmt.Errorf("unknown ip: %v", IPAddress)
		}
	}

	return nil
}
|
||||
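The precedence that `securedCheck` applies when determining the caller's address can be summarized in a standalone sketch (illustration only, not part of the commit): `X-Real-Ip` is consulted first, then `X-Forwarded-For`, and finally `RemoteAddr`; the resulting string is matched verbatim against `config.Keys.ApiAllowedIPs`.

package main

import (
	"fmt"
	"net/http"
)

// clientIP mirrors the header precedence used by securedCheck above.
func clientIP(r *http.Request) string {
	if ip := r.Header.Get("X-Real-Ip"); ip != "" {
		return ip
	}
	if ip := r.Header.Get("X-Forwarded-For"); ip != "" {
		return ip
	}
	return r.RemoteAddr
}

func main() {
	r, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/api/users/", nil)
	r.RemoteAddr = "10.0.0.5:51234"
	fmt.Println(clientIP(r)) // "10.0.0.5:51234" as long as no proxy header is set
	r.Header.Set("X-Real-Ip", "192.168.0.10")
	fmt.Println(clientIP(r)) // "192.168.0.10", which is then checked against ApiAllowedIPs
}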
|
||||
// getJobs godoc
|
||||
// @summary Lists all jobs
|
||||
// @tags query
|
||||
@ -193,8 +242,10 @@ func decode(r io.Reader, val interface{}) error {
|
||||
// @router /jobs/ [get]
|
||||
func (api *RestApi) getJobs(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -335,9 +386,11 @@ func (api *RestApi) getJobs(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/{id} [post]
|
||||
func (api *RestApi) getJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v",
|
||||
auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -426,8 +479,10 @@ func (api *RestApi) getJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/tag_job/{id} [post]
|
||||
func (api *RestApi) tagJob(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -491,8 +546,10 @@ func (api *RestApi) tagJob(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/start_job/ [post]
|
||||
func (api *RestApi) startJob(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -572,8 +629,10 @@ func (api *RestApi) startJob(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/stop_job/{id} [post]
|
||||
func (api *RestApi) stopJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -625,8 +684,10 @@ func (api *RestApi) stopJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/stop_job/ [post]
|
||||
func (api *RestApi) stopJobByRequest(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -671,8 +732,8 @@ func (api *RestApi) stopJobByRequest(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/delete_job/{id} [delete]
|
||||
func (api *RestApi) deleteJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil && !user.HasRole(schema.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -719,8 +780,9 @@ func (api *RestApi) deleteJobById(rw http.ResponseWriter, r *http.Request) {
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/delete_job/ [delete]
|
||||
func (api *RestApi) deleteJobByRequest(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil &&
|
||||
!user.HasRole(schema.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -775,8 +837,8 @@ func (api *RestApi) deleteJobByRequest(rw http.ResponseWriter, r *http.Request)
|
||||
// @security ApiKeyAuth
|
||||
// @router /jobs/delete_job_before/{ts} [delete]
|
||||
func (api *RestApi) deleteJobBefore(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); user != nil && !user.HasRole(auth.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", auth.GetRoleString(auth.RoleApi)), http.StatusForbidden, rw)
|
||||
if user := repository.GetUserFromContext(r.Context()); user != nil && !user.HasRole(schema.RoleApi) {
|
||||
handleError(fmt.Errorf("missing role: %v", schema.GetRoleString(schema.RoleApi)), http.StatusForbidden, rw)
|
||||
return
|
||||
}
|
||||
|
||||
@ -891,11 +953,223 @@ func (api *RestApi) getJobMetrics(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
}
|
||||
|
||||
// createUser godoc
|
||||
// @summary Adds a new user
|
||||
// @tags add and modify
|
||||
// @description User specified in form data will be saved to database.
|
||||
// @accept mpfd
|
||||
// @produce plain
|
||||
// @param username formData string true "Unique user ID"
|
||||
// @param password formData string true "User password"
|
||||
// @param role formData string true "User role" Enums(admin, support, manager, user, api)
|
||||
// @param project formData string false "Managed project, required for new manager role user"
|
||||
// @param name formData string false "Users name"
|
||||
// @param email formData string false "Users email"
|
||||
// @success 200 {string} string "Success Response"
|
||||
// @failure 400 {string} string "Bad Request"
|
||||
// @failure 401 {string} string "Unauthorized"
|
||||
// @failure 403 {string} string "Forbidden"
|
||||
// @failure 422 {string} string "Unprocessable Entity: creating user failed"
|
||||
// @failure 500 {string} string "Internal Server Error"
|
||||
// @security ApiKeyAuth
|
||||
// @router /users/ [post]
|
||||
func (api *RestApi) createUser(rw http.ResponseWriter, r *http.Request) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
rw.Header().Set("Content-Type", "text/plain")
|
||||
me := repository.GetUserFromContext(r.Context())
|
||||
if !me.HasRole(schema.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to create new users", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
username, password, role, name, email, project := r.FormValue("username"),
|
||||
r.FormValue("password"), r.FormValue("role"), r.FormValue("name"),
|
||||
r.FormValue("email"), r.FormValue("project")
|
||||
|
||||
if len(password) == 0 && role != schema.GetRoleString(schema.RoleApi) {
|
||||
http.Error(rw, "Only API users are allowed to have a blank password (login will be impossible)", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if len(project) != 0 && role != schema.GetRoleString(schema.RoleManager) {
|
||||
http.Error(rw, "only managers require a project (can be changed later)",
|
||||
http.StatusBadRequest)
|
||||
return
|
||||
} else if len(project) == 0 && role == schema.GetRoleString(schema.RoleManager) {
|
||||
http.Error(rw, "managers require a project to manage (can be changed later)",
|
||||
http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := repository.GetUserRepository().AddUser(&schema.User{
|
||||
Username: username,
|
||||
Name: name,
|
||||
Password: password,
|
||||
Email: email,
|
||||
Projects: []string{project},
|
||||
Roles: []string{role}}); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
||||
rw.Write([]byte(fmt.Sprintf("User %v successfully created!\n", username)))
|
||||
}
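A possible client-side call for this handler; host, path prefix, and token are placeholders (the routes are assumed to be mounted under `/api`, matching the Swagger BasePath). The handler reads `r.FormValue`, which accepts URL-encoded as well as multipart forms, so the simpler encoding is used here; a token-authenticated caller must also pass the `ApiAllowedIPs` check from `securedCheck`.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{}
	form.Set("username", "jdoe")
	form.Set("password", "secret")
	form.Set("role", "manager")
	form.Set("project", "proj-a") // required when the role is "manager"

	req, _ := http.NewRequest(http.MethodPost,
		"http://localhost:8080/api/users/", strings.NewReader(form.Encode()))
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Authorization", "Bearer <admin JWT>") // admin role required

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body)) // on success: "User jdoe successfully created!"
}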
|
||||
|
||||
// deleteUser godoc
|
||||
// @summary Deletes a user
|
||||
// @tags remove
|
||||
// @description User defined by username in form data will be deleted from database.
|
||||
// @accept mpfd
|
||||
// @produce plain
|
||||
// @param username formData string true "User ID to delete"
|
||||
// @success 200 "User deleted successfully"
|
||||
// @failure 400 {string} string "Bad Request"
|
||||
// @failure 401 {string} string "Unauthorized"
|
||||
// @failure 403 {string} string "Forbidden"
|
||||
// @failure 422 {string} string "Unprocessable Entity: deleting user failed"
|
||||
// @failure 500 {string} string "Internal Server Error"
|
||||
// @security ApiKeyAuth
|
||||
// @router /users/ [delete]
|
||||
func (api *RestApi) deleteUser(rw http.ResponseWriter, r *http.Request) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
if user := repository.GetUserFromContext(r.Context()); !user.HasRole(schema.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to delete a user", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
username := r.FormValue("username")
|
||||
if err := repository.GetUserRepository().DelUser(username); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
}
|
||||
|
||||
// getUsers godoc
|
||||
// @summary Returns a list of users
|
||||
// @tags query
|
||||
// @description Returns a JSON-encoded list of users.
|
||||
// @description Required query-parameter defines if all users or only users with additional special roles are returned.
|
||||
// @produce json
|
||||
// @param not-just-user query bool true "If returned list should contain all users or only users with additional special roles"
|
||||
// @success 200 {array} api.ApiReturnedUser "List of users returned successfully"
|
||||
// @failure 400 {string} string "Bad Request"
|
||||
// @failure 401 {string} string "Unauthorized"
|
||||
// @failure 403 {string} string "Forbidden"
|
||||
// @failure 500 {string} string "Internal Server Error"
|
||||
// @security ApiKeyAuth
|
||||
// @router /users/ [get]
|
||||
func (api *RestApi) getUsers(rw http.ResponseWriter, r *http.Request) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
if user := repository.GetUserFromContext(r.Context()); !user.HasRole(schema.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to fetch a list of users", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
users, err := repository.GetUserRepository().ListUsers(r.URL.Query().Get("not-just-user") == "true")
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
json.NewEncoder(rw).Encode(users)
|
||||
}
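For completeness, a sketch of consuming the list endpoint from Go (host and token are placeholders); the response is a JSON array of `api.ApiReturnedUser` objects, of which only a subset of fields is decoded here.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// partial mirror of api.ApiReturnedUser; unknown fields are ignored on decode
type listedUser struct {
	Username string   `json:"username"`
	Roles    []string `json:"roles"`
	Projects []string `json:"projects"`
}

func main() {
	req, _ := http.NewRequest(http.MethodGet,
		"http://localhost:8080/api/users/?not-just-user=true", nil)
	req.Header.Set("Authorization", "Bearer <admin JWT>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var users []listedUser
	if err := json.NewDecoder(resp.Body).Decode(&users); err != nil {
		panic(err)
	}
	for _, u := range users {
		fmt.Println(u.Username, u.Roles, u.Projects)
	}
}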
|
||||
|
||||
// updateUser godoc
|
||||
// @summary Updates an existing user
|
||||
// @tags add and modify
|
||||
// @description Modifies user defined by username (id) in one of four possible ways.
|
||||
// @description If more than one formValue is set then only the highest priority field is used.
|
||||
// @accept mpfd
|
||||
// @produce plain
|
||||
// @param id path string true "Database ID of User"
|
||||
// @param add-role formData string false "Priority 1: Role to add" Enums(admin, support, manager, user, api)
|
||||
// @param remove-role formData string false "Priority 2: Role to remove" Enums(admin, support, manager, user, api)
|
||||
// @param add-project formData string false "Priority 3: Project to add"
|
||||
// @param remove-project formData string false "Priority 4: Project to remove"
|
||||
// @success 200 {string} string "Success Response Message"
|
||||
// @failure 400 {string} string "Bad Request"
|
||||
// @failure 401 {string} string "Unauthorized"
|
||||
// @failure 403 {string} string "Forbidden"
|
||||
// @failure 422 {string} string "Unprocessable Entity: The user could not be updated"
|
||||
// @failure 500 {string} string "Internal Server Error"
|
||||
// @security ApiKeyAuth
|
||||
// @router /user/{id} [post]
|
||||
func (api *RestApi) updateUser(rw http.ResponseWriter, r *http.Request) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
if user := repository.GetUserFromContext(r.Context()); !user.HasRole(schema.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to update a user", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
// Get Values
|
||||
newrole := r.FormValue("add-role")
|
||||
delrole := r.FormValue("remove-role")
|
||||
newproj := r.FormValue("add-project")
|
||||
delproj := r.FormValue("remove-project")
|
||||
|
||||
// TODO: Handle anything but roles...
|
||||
if newrole != "" {
|
||||
if err := repository.GetUserRepository().AddRole(r.Context(), mux.Vars(r)["id"], newrole); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Add Role Success"))
|
||||
} else if delrole != "" {
|
||||
if err := repository.GetUserRepository().RemoveRole(r.Context(), mux.Vars(r)["id"], delrole); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Remove Role Success"))
|
||||
} else if newproj != "" {
|
||||
if err := repository.GetUserRepository().AddProject(r.Context(), mux.Vars(r)["id"], newproj); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Add Project Success"))
|
||||
} else if delproj != "" {
|
||||
if err := repository.GetUserRepository().RemoveProject(r.Context(), mux.Vars(r)["id"], delproj); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Remove Project Success"))
|
||||
} else {
|
||||
http.Error(rw, "Not Add or Del [role|project]?", http.StatusInternalServerError)
|
||||
}
|
||||
}
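The priority rule is easy to miss: if several form values are submitted in one request, only the highest-priority one is applied. A short sketch (placeholders throughout):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{}
	form.Set("add-role", "support")      // priority 1: this is applied ...
	form.Set("remove-project", "proj-a") // ... while this lower-priority field is ignored

	req, _ := http.NewRequest(http.MethodPost,
		"http://localhost:8080/api/user/jdoe", strings.NewReader(form.Encode()))
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Authorization", "Bearer <admin JWT>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	msg, _ := io.ReadAll(resp.Body)
	fmt.Println(string(msg)) // "Add Role Success"
}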
|
||||
|
||||
func (api *RestApi) getJWT(rw http.ResponseWriter, r *http.Request) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
rw.Header().Set("Content-Type", "text/plain")
|
||||
username := r.FormValue("username")
|
||||
me := auth.GetUser(r.Context())
|
||||
if !me.HasRole(auth.RoleAdmin) {
|
||||
me := repository.GetUserFromContext(r.Context())
|
||||
if !me.HasRole(schema.RoleAdmin) {
|
||||
if username != me.Username {
|
||||
http.Error(rw, "Only admins are allowed to sign JWTs not for themselves",
|
||||
http.StatusForbidden)
|
||||
@ -903,7 +1177,7 @@ func (api *RestApi) getJWT(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
user, err := api.Authentication.GetUser(username)
|
||||
user, err := repository.GetUserRepository().GetUser(username)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
@ -919,80 +1193,20 @@ func (api *RestApi) getJWT(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Write([]byte(jwt))
|
||||
}
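A request against this endpoint might look as follows (host, token, and username are placeholders; the `/jwt/` route is registered in `MountRoutes` above). Non-admin callers may only request a token for themselves; the signed JWT is returned as plain text.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, _ := http.NewRequest(http.MethodGet,
		"http://localhost:8080/api/jwt/?username=jdoe", nil)
	req.Header.Set("Authorization", "Bearer <admin JWT>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	token, _ := io.ReadAll(resp.Body)
	fmt.Println(string(token)) // the signed JWT for user jdoe
}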
|
||||
|
||||
func (api *RestApi) createUser(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "text/plain")
|
||||
me := auth.GetUser(r.Context())
|
||||
if !me.HasRole(auth.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to create new users", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
username, password, role, name, email, project := r.FormValue("username"), r.FormValue("password"), r.FormValue("role"), r.FormValue("name"), r.FormValue("email"), r.FormValue("project")
|
||||
if len(password) == 0 && role != auth.GetRoleString(auth.RoleApi) {
|
||||
http.Error(rw, "Only API users are allowed to have a blank password (login will be impossible)", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if len(project) != 0 && role != auth.GetRoleString(auth.RoleManager) {
|
||||
http.Error(rw, "only managers require a project (can be changed later)", http.StatusBadRequest)
|
||||
return
|
||||
} else if len(project) == 0 && role == auth.GetRoleString(auth.RoleManager) {
|
||||
http.Error(rw, "managers require a project to manage (can be changed later)", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := api.Authentication.AddUser(&auth.User{
|
||||
Username: username,
|
||||
Name: name,
|
||||
Password: password,
|
||||
Email: email,
|
||||
Projects: []string{project},
|
||||
Roles: []string{role}}); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
||||
rw.Write([]byte(fmt.Sprintf("User %v successfully created!\n", username)))
|
||||
}
|
||||
|
||||
func (api *RestApi) deleteUser(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); !user.HasRole(auth.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to delete a user", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
username := r.FormValue("username")
|
||||
if err := api.Authentication.DelUser(username); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
}
|
||||
|
||||
func (api *RestApi) getUsers(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); !user.HasRole(auth.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to fetch a list of users", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
users, err := api.Authentication.ListUsers(r.URL.Query().Get("not-just-user") == "true")
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
json.NewEncoder(rw).Encode(users)
|
||||
}
|
||||
|
||||
func (api *RestApi) getRoles(rw http.ResponseWriter, r *http.Request) {
|
||||
user := auth.GetUser(r.Context())
|
||||
if !user.HasRole(auth.RoleAdmin) {
|
||||
err := securedCheck(r)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
user := repository.GetUserFromContext(r.Context())
|
||||
if !user.HasRole(schema.RoleAdmin) {
|
||||
http.Error(rw, "only admins are allowed to fetch a list of roles", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
roles, err := auth.GetValidRoles(user)
|
||||
roles, err := schema.GetValidRoles(user)
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@ -1001,55 +1215,13 @@ func (api *RestApi) getRoles(rw http.ResponseWriter, r *http.Request) {
|
||||
json.NewEncoder(rw).Encode(roles)
|
||||
}
|
||||
|
||||
func (api *RestApi) updateUser(rw http.ResponseWriter, r *http.Request) {
|
||||
if user := auth.GetUser(r.Context()); !user.HasRole(auth.RoleAdmin) {
|
||||
http.Error(rw, "Only admins are allowed to update a user", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
// Get Values
|
||||
newrole := r.FormValue("add-role")
|
||||
delrole := r.FormValue("remove-role")
|
||||
newproj := r.FormValue("add-project")
|
||||
delproj := r.FormValue("remove-project")
|
||||
|
||||
// TODO: Handle anything but roles...
|
||||
if newrole != "" {
|
||||
if err := api.Authentication.AddRole(r.Context(), mux.Vars(r)["id"], newrole); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Add Role Success"))
|
||||
} else if delrole != "" {
|
||||
if err := api.Authentication.RemoveRole(r.Context(), mux.Vars(r)["id"], delrole); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Remove Role Success"))
|
||||
} else if newproj != "" {
|
||||
if err := api.Authentication.AddProject(r.Context(), mux.Vars(r)["id"], newproj); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Add Project Success"))
|
||||
} else if delproj != "" {
|
||||
if err := api.Authentication.RemoveProject(r.Context(), mux.Vars(r)["id"], delproj); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
rw.Write([]byte("Remove Project Success"))
|
||||
} else {
|
||||
http.Error(rw, "Not Add or Del [role|project]?", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (api *RestApi) updateConfiguration(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "text/plain")
|
||||
key, value := r.FormValue("key"), r.FormValue("value")
|
||||
|
||||
fmt.Printf("REST > KEY: %#v\nVALUE: %#v\n", key, value)
|
||||
|
||||
if err := repository.GetUserCfgRepo().UpdateConfig(key, value, auth.GetUser(r.Context())); err != nil {
|
||||
if err := repository.GetUserCfgRepo().UpdateConfig(key, value, repository.GetUserFromContext(r.Context())); err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
@ -7,224 +7,26 @@ package auth
import (
"context"
"crypto/rand"
"database/sql"
"encoding/base64"
"errors"
"fmt"
"net/http"
"os"
"strings"
"time"

"github.com/ClusterCockpit/cc-backend/internal/config"
"github.com/ClusterCockpit/cc-backend/internal/repository"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"github.com/gorilla/sessions"
"github.com/jmoiron/sqlx"
)

type AuthSource int

const (
AuthViaLocalPassword AuthSource = iota
AuthViaLDAP
AuthViaToken
)

type User struct {
Username string `json:"username"`
Password string `json:"-"`
Name string `json:"name"`
Roles []string `json:"roles"`
AuthSource AuthSource `json:"via"`
Email string `json:"email"`
Projects []string `json:"projects"`
Expiration time.Time
}

type Role int

const (
RoleAnonymous Role = iota
RoleApi
RoleUser
RoleManager
RoleSupport
RoleAdmin
RoleError
)

func GetRoleString(roleInt Role) string {
return [6]string{"anonymous", "api", "user", "manager", "support", "admin"}[roleInt]
}

func getRoleEnum(roleStr string) Role {
switch strings.ToLower(roleStr) {
case "admin":
return RoleAdmin
case "support":
return RoleSupport
case "manager":
return RoleManager
case "user":
return RoleUser
case "api":
return RoleApi
case "anonymous":
return RoleAnonymous
default:
return RoleError
}
}

func isValidRole(role string) bool {
return getRoleEnum(role) != RoleError
}

func (u *User) HasValidRole(role string) (hasRole bool, isValid bool) {
if isValidRole(role) {
for _, r := range u.Roles {
if r == role {
return true, true
}
}
return false, true
}
return false, false
}

func (u *User) HasRole(role Role) bool {
for _, r := range u.Roles {
if r == GetRoleString(role) {
return true
}
}
return false
}

// Role-Arrays are short: performance not impacted by nested loop
func (u *User) HasAnyRole(queryroles []Role) bool {
for _, ur := range u.Roles {
for _, qr := range queryroles {
if ur == GetRoleString(qr) {
return true
}
}
}
return false
}

// Role-Arrays are short: performance not impacted by nested loop
func (u *User) HasAllRoles(queryroles []Role) bool {
target := len(queryroles)
matches := 0
for _, ur := range u.Roles {
for _, qr := range queryroles {
if ur == GetRoleString(qr) {
matches += 1
break
}
}
}

if matches == target {
return true
} else {
return false
}
}

// Role-Arrays are short: performance not impacted by nested loop
func (u *User) HasNotRoles(queryroles []Role) bool {
matches := 0
for _, ur := range u.Roles {
for _, qr := range queryroles {
if ur == GetRoleString(qr) {
matches += 1
break
}
}
}

if matches == 0 {
return true
} else {
return false
}
}

// Called by API endpoint '/roles/' from frontend: Only required for admin config -> Check Admin Role
func GetValidRoles(user *User) ([]string, error) {
var vals []string
if user.HasRole(RoleAdmin) {
for i := RoleApi; i < RoleError; i++ {
vals = append(vals, GetRoleString(i))
}
return vals, nil
}

return vals, fmt.Errorf("%s: only admins are allowed to fetch a list of roles", user.Username)
}

// Called by routerConfig web.page setup in backend: Only requires known user
func GetValidRolesMap(user *User) (map[string]Role, error) {
named := make(map[string]Role)
if user.HasNotRoles([]Role{RoleAnonymous}) {
for i := RoleApi; i < RoleError; i++ {
named[GetRoleString(i)] = i
}
return named, nil
}
return named, fmt.Errorf("only known users are allowed to fetch a list of roles")
}

// Find highest role
func (u *User) GetAuthLevel() Role {
if u.HasRole(RoleAdmin) {
return RoleAdmin
} else if u.HasRole(RoleSupport) {
return RoleSupport
} else if u.HasRole(RoleManager) {
return RoleManager
} else if u.HasRole(RoleUser) {
return RoleUser
} else if u.HasRole(RoleApi) {
return RoleApi
} else if u.HasRole(RoleAnonymous) {
return RoleAnonymous
} else {
return RoleError
}
}

func (u *User) HasProject(project string) bool {
for _, p := range u.Projects {
if p == project {
return true
}
}
return false
}

func GetUser(ctx context.Context) *User {
x := ctx.Value(ContextUserKey)
if x == nil {
return nil
}

return x.(*User)
}

type Authenticator interface {
Init(auth *Authentication, config interface{}) error
CanLogin(user *User, rw http.ResponseWriter, r *http.Request) bool
Login(user *User, rw http.ResponseWriter, r *http.Request) (*User, error)
Auth(rw http.ResponseWriter, r *http.Request) (*User, error)
CanLogin(user *schema.User, username string, rw http.ResponseWriter, r *http.Request) (*schema.User, bool)
Login(user *schema.User, rw http.ResponseWriter, r *http.Request) (*schema.User, error)
}

type ContextKey string

const ContextUserKey ContextKey = "user"

type Authentication struct {
db *sqlx.DB
sessionStore *sessions.CookieStore
SessionMaxAge time.Duration

@ -234,10 +36,34 @@ type Authentication struct {
LocalAuth *LocalAuthenticator
}

func Init(db *sqlx.DB,
configs map[string]interface{}) (*Authentication, error) {
func (auth *Authentication) AuthViaSession(
rw http.ResponseWriter,
r *http.Request) (*schema.User, error) {
session, err := auth.sessionStore.Get(r, "session")
if err != nil {
log.Error("Error while getting session store")
return nil, err
}

if session.IsNew {
return nil, nil
}

// TODO: Check if session keys exist
username, _ := session.Values["username"].(string)
projects, _ := session.Values["projects"].([]string)
roles, _ := session.Values["roles"].([]string)
return &schema.User{
Username: username,
Projects: projects,
Roles: roles,
AuthType: schema.AuthSession,
AuthSource: -1,
}, nil
}

func Init() (*Authentication, error) {
auth := &Authentication{}
auth.db = db

sessKey := os.Getenv("SESSION_KEY")
if sessKey == "" {
@ -257,78 +83,78 @@ func Init(db *sqlx.DB,
auth.sessionStore = sessions.NewCookieStore(bytes)
}

if config.Keys.LdapConfig != nil {
ldapAuth := &LdapAuthenticator{}
if err := ldapAuth.Init(); err != nil {
log.Warn("Error while initializing authentication -> ldapAuth init failed")
} else {
auth.LdapAuth = ldapAuth
auth.authenticators = append(auth.authenticators, auth.LdapAuth)
}
} else {
log.Info("Missing LDAP configuration: No LDAP support!")
}

if config.Keys.JwtConfig != nil {
auth.JwtAuth = &JWTAuthenticator{}
if err := auth.JwtAuth.Init(); err != nil {
log.Error("Error while initializing authentication -> jwtAuth init failed")
return nil, err
}

jwtSessionAuth := &JWTSessionAuthenticator{}
if err := jwtSessionAuth.Init(); err != nil {
log.Info("jwtSessionAuth init failed: No JWT login support!")
} else {
auth.authenticators = append(auth.authenticators, jwtSessionAuth)
}

jwtCookieSessionAuth := &JWTCookieSessionAuthenticator{}
if err := jwtCookieSessionAuth.Init(); err != nil {
log.Info("jwtCookieSessionAuth init failed: No JWT cookie login support!")
} else {
auth.authenticators = append(auth.authenticators, jwtCookieSessionAuth)
}
} else {
log.Info("Missing JWT configuration: No JWT token support!")
}

auth.LocalAuth = &LocalAuthenticator{}
if err := auth.LocalAuth.Init(auth, nil); err != nil {
if err := auth.LocalAuth.Init(); err != nil {
log.Error("Error while initializing authentication -> localAuth init failed")
return nil, err
}
auth.authenticators = append(auth.authenticators, auth.LocalAuth)

auth.JwtAuth = &JWTAuthenticator{}
if err := auth.JwtAuth.Init(auth, configs["jwt"]); err != nil {
log.Error("Error while initializing authentication -> jwtAuth init failed")
return nil, err
}
auth.authenticators = append(auth.authenticators, auth.JwtAuth)

if config, ok := configs["ldap"]; ok {
auth.LdapAuth = &LdapAuthenticator{}
if err := auth.LdapAuth.Init(auth, config); err != nil {
log.Error("Error while initializing authentication -> ldapAuth init failed")
return nil, err
}
auth.authenticators = append(auth.authenticators, auth.LdapAuth)
}

return auth, nil
}

func (auth *Authentication) AuthViaSession(
rw http.ResponseWriter,
r *http.Request) (*User, error) {

session, err := auth.sessionStore.Get(r, "session")
if err != nil {
log.Error("Error while getting session store")
return nil, err
}

if session.IsNew {
return nil, nil
}

// TODO Check if keys are present in session?
username, _ := session.Values["username"].(string)
projects, _ := session.Values["projects"].([]string)
roles, _ := session.Values["roles"].([]string)
return &User{
Username: username,
Projects: projects,
Roles: roles,
AuthSource: -1,
}, nil
}

// Handle a POST request that should log the user in, starting a new session.
func (auth *Authentication) Login(
onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, loginErr error)) http.Handler {

return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
err := errors.New("no authenticator applied")
username := r.FormValue("username")
user := (*User)(nil)
var dbUser *schema.User

if username != "" {
user, _ = auth.GetUser(username)
var err error
dbUser, err = repository.GetUserRepository().GetUser(username)
if err != nil && err != sql.ErrNoRows {
log.Errorf("Error while loading user '%v'", username)
}
}

for _, authenticator := range auth.authenticators {
if !authenticator.CanLogin(user, rw, r) {
var ok bool
var user *schema.User
if user, ok = authenticator.CanLogin(dbUser, username, rw, r); !ok {
continue
} else {
log.Debugf("Can login with user %v", user)
}

user, err = authenticator.Login(user, rw, r)
user, err := authenticator.Login(user, rw, r)
if err != nil {
log.Warnf("user login failed: %s", err.Error())
onfailure(rw, r, err)
@ -355,49 +181,50 @@ func (auth *Authentication) Login(
}

log.Infof("login successfull: user: %#v (roles: %v, projects: %v)", user.Username, user.Roles, user.Projects)
ctx := context.WithValue(r.Context(), ContextUserKey, user)
ctx := context.WithValue(r.Context(), repository.ContextUserKey, user)
onsuccess.ServeHTTP(rw, r.WithContext(ctx))
return
}

log.Debugf("login failed: no authenticator applied")
onfailure(rw, r, err)
onfailure(rw, r, errors.New("no authenticator applied"))
})
}

// Authenticate the user and put a User object in the
// context of the request. If authentication fails,
// do not continue but send client to the login screen.
func (auth *Authentication) Auth(
onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error)) http.Handler {

return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
for _, authenticator := range auth.authenticators {
user, err := authenticator.Auth(rw, r)

user, err := auth.JwtAuth.AuthViaJWT(rw, r)
if err != nil {
log.Infof("authentication failed: %s", err.Error())
http.Error(rw, err.Error(), http.StatusUnauthorized)
return
}

if user == nil {
user, err = auth.AuthViaSession(rw, r)
if err != nil {
log.Infof("authentication failed: %s", err.Error())
http.Error(rw, err.Error(), http.StatusUnauthorized)
return
}
if user == nil {
continue
}
}

ctx := context.WithValue(r.Context(), ContextUserKey, user)
if user != nil {
ctx := context.WithValue(r.Context(), repository.ContextUserKey, user)
onsuccess.ServeHTTP(rw, r.WithContext(ctx))
return
}

log.Debugf("authentication failed: %s", "no authenticator applied")
// http.Error(rw, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
onfailure(rw, r, errors.New("unauthorized (login first or use a token)"))
log.Debug("authentication failed")
onfailure(rw, r, errors.New("unauthorized (please login first)"))
})
}

// Clears the session cookie
func (auth *Authentication) Logout(onsuccess http.Handler) http.Handler {

return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
session, err := auth.sessionStore.Get(r, "session")
if err != nil {
@ -6,39 +6,26 @@ package auth

import (
"crypto/ed25519"
"database/sql"
"encoding/base64"
"errors"
"fmt"
"net/http"
"os"
"strings"
"time"

"github.com/ClusterCockpit/cc-backend/internal/config"
"github.com/ClusterCockpit/cc-backend/internal/repository"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"github.com/golang-jwt/jwt/v4"
)

type JWTAuthenticator struct {
auth *Authentication

publicKey ed25519.PublicKey
privateKey ed25519.PrivateKey
publicKeyCrossLogin ed25519.PublicKey // For accepting externally generated JWTs

loginTokenKey []byte // HS256 key

config *schema.JWTAuthConfig
publicKey ed25519.PublicKey
privateKey ed25519.PrivateKey
}

var _ Authenticator = (*JWTAuthenticator)(nil)

func (ja *JWTAuthenticator) Init(auth *Authentication, conf interface{}) error {

ja.auth = auth
ja.config = conf.(*schema.JWTAuthConfig)

func (ja *JWTAuthenticator) Init() error {
pubKey, privKey := os.Getenv("JWT_PUBLIC_KEY"), os.Getenv("JWT_PRIVATE_KEY")
if pubKey == "" || privKey == "" {
log.Warn("environment variables 'JWT_PUBLIC_KEY' or 'JWT_PRIVATE_KEY' not set (token based authentication will not work)")
@ -57,130 +44,12 @@ func (ja *JWTAuthenticator) Init(auth *Authentication, conf interface{}) error {
ja.privateKey = ed25519.PrivateKey(bytes)
}

if pubKey = os.Getenv("CROSS_LOGIN_JWT_HS512_KEY"); pubKey != "" {
bytes, err := base64.StdEncoding.DecodeString(pubKey)
if err != nil {
log.Warn("Could not decode cross login JWT HS512 key")
return err
}
ja.loginTokenKey = bytes
}

// Look for external public keys
pubKeyCrossLogin, keyFound := os.LookupEnv("CROSS_LOGIN_JWT_PUBLIC_KEY")
if keyFound && pubKeyCrossLogin != "" {
bytes, err := base64.StdEncoding.DecodeString(pubKeyCrossLogin)
if err != nil {
log.Warn("Could not decode cross login JWT public key")
return err
}
ja.publicKeyCrossLogin = ed25519.PublicKey(bytes)

// Warn if other necessary settings are not configured
if ja.config != nil {
if ja.config.CookieName == "" {
log.Warn("cookieName for JWTs not configured (cross login via JWT cookie will fail)")
}
if !ja.config.ForceJWTValidationViaDatabase {
log.Warn("forceJWTValidationViaDatabase not set to true: CC will accept users and roles defined in JWTs regardless of its own database!")
}
if ja.config.TrustedExternalIssuer == "" {
log.Warn("trustedExternalIssuer for JWTs not configured (cross login via JWT cookie will fail)")
}
} else {
log.Warn("cookieName and trustedExternalIssuer for JWTs not configured (cross login via JWT cookie will fail)")
}
} else {
ja.publicKeyCrossLogin = nil
log.Debug("environment variable 'CROSS_LOGIN_JWT_PUBLIC_KEY' not set (cross login token based authentication will not work)")
}

return nil
}

func (ja *JWTAuthenticator) CanLogin(
user *User,
func (ja *JWTAuthenticator) AuthViaJWT(
rw http.ResponseWriter,
r *http.Request) bool {

return (user != nil && user.AuthSource == AuthViaToken) ||
r.Header.Get("Authorization") != "" ||
r.URL.Query().Get("login-token") != ""
}

func (ja *JWTAuthenticator) Login(
user *User,
rw http.ResponseWriter,
r *http.Request) (*User, error) {

rawtoken := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ")
if rawtoken == "" {
rawtoken = r.URL.Query().Get("login-token")
}

token, err := jwt.Parse(rawtoken, func(t *jwt.Token) (interface{}, error) {
if t.Method == jwt.SigningMethodEdDSA {
return ja.publicKey, nil
}
if t.Method == jwt.SigningMethodHS256 || t.Method == jwt.SigningMethodHS512 {
return ja.loginTokenKey, nil
}
return nil, fmt.Errorf("AUTH/JWT > unkown signing method for login token: %s (known: HS256, HS512, EdDSA)", t.Method.Alg())
})
if err != nil {
log.Warn("Error while parsing jwt token")
return nil, err
}

if err = token.Claims.Valid(); err != nil {
log.Warn("jwt token claims are not valid")
return nil, err
}

claims := token.Claims.(jwt.MapClaims)
sub, _ := claims["sub"].(string)
exp, _ := claims["exp"].(float64)
var roles []string
if rawroles, ok := claims["roles"].([]interface{}); ok {
for _, rr := range rawroles {
if r, ok := rr.(string); ok {
if isValidRole(r) {
roles = append(roles, r)
}
}
}
}
if rawrole, ok := claims["roles"].(string); ok {
if isValidRole(rawrole) {
roles = append(roles, rawrole)
}
}

if user == nil {
user, err = ja.auth.GetUser(sub)
if err != nil && err != sql.ErrNoRows {
log.Errorf("Error while loading user '%v'", sub)
return nil, err
} else if user == nil {
user = &User{
Username: sub,
Roles: roles,
AuthSource: AuthViaToken,
}
if err := ja.auth.AddUser(user); err != nil {
log.Errorf("Error while adding user '%v' to auth from token", user.Username)
return nil, err
}
}
}

user.Expiration = time.Unix(int64(exp), 0)
return user, nil
}

func (ja *JWTAuthenticator) Auth(
rw http.ResponseWriter,
r *http.Request) (*User, error) {
r *http.Request) (*schema.User, error) {

rawtoken := r.Header.Get("X-Auth-Token")
if rawtoken == "" {
@ -188,59 +57,22 @@ func (ja *JWTAuthenticator) Auth(
rawtoken = strings.TrimPrefix(rawtoken, "Bearer ")
}

// If no auth header was found, check for a certain cookie containing a JWT
cookieName := ""
cookieFound := false
if ja.config != nil && ja.config.CookieName != "" {
cookieName = ja.config.CookieName
}

// Try to read the JWT cookie
if rawtoken == "" && cookieName != "" {
jwtCookie, err := r.Cookie(cookieName)

if err == nil && jwtCookie.Value != "" {
rawtoken = jwtCookie.Value
cookieFound = true
}
}

// Because a user can also log in via a token, the
// session cookie must be checked here as well:
// there is no token
if rawtoken == "" {
return ja.auth.AuthViaSession(rw, r)
return nil, nil
}

// Try to parse JWT
token, err := jwt.Parse(rawtoken, func(t *jwt.Token) (interface{}, error) {
if t.Method != jwt.SigningMethodEdDSA {
return nil, errors.New("only Ed25519/EdDSA supported")
}

// Is there more than one public key?
if ja.publicKeyCrossLogin != nil &&
ja.config != nil &&
ja.config.TrustedExternalIssuer != "" {

// Determine whether to use the external public key
unvalidatedIssuer, success := t.Claims.(jwt.MapClaims)["iss"].(string)
if success && unvalidatedIssuer == ja.config.TrustedExternalIssuer {
// The (unvalidated) issuer seems to be the expected one,
// use public cross login key from config
return ja.publicKeyCrossLogin, nil
}
}

// No cross login key configured or issuer not expected
// Try own key
return ja.publicKey, nil
})
if err != nil {
log.Warn("Error while parsing token")
log.Warn("Error while parsing JWT token")
return nil, err
}

// Check token validity
if err := token.Claims.Valid(); err != nil {
log.Warn("jwt token claims are not valid")
return nil, err
@ -253,15 +85,15 @@ func (ja *JWTAuthenticator) Auth(
var roles []string

// Validate user + roles from JWT against database?
if ja.config != nil && ja.config.ForceJWTValidationViaDatabase {
user, err := ja.auth.GetUser(sub)
if config.Keys.JwtConfig.ValidateUser {
ur := repository.GetUserRepository()
user, err := ur.GetUser(sub)

// Deny any logins for unknown usernames
if err != nil {
log.Warn("Could not find user from JWT in internal database.")
return nil, errors.New("unknown user")
}

// Take user roles from database instead of trusting the JWT
roles = user.Roles
} else {
@ -275,47 +107,16 @@ func (ja *JWTAuthenticator) Auth(
}
}

if cookieFound {
// Create a session so that we no longer need the JTW Cookie
session, err := ja.auth.sessionStore.New(r, "session")
if err != nil {
log.Errorf("session creation failed: %s", err.Error())
http.Error(rw, err.Error(), http.StatusInternalServerError)
return nil, err
}

if ja.auth.SessionMaxAge != 0 {
session.Options.MaxAge = int(ja.auth.SessionMaxAge.Seconds())
}
session.Values["username"] = sub
session.Values["roles"] = roles

if err := ja.auth.sessionStore.Save(r, rw, session); err != nil {
log.Warnf("session save failed: %s", err.Error())
http.Error(rw, err.Error(), http.StatusInternalServerError)
return nil, err
}

// (Ask browser to) Delete JWT cookie
deletedCookie := &http.Cookie{
Name: cookieName,
Value: "",
Path: "/",
MaxAge: -1,
HttpOnly: true,
}
http.SetCookie(rw, deletedCookie)
}

return &User{
return &schema.User{
Username: sub,
Roles: roles,
AuthSource: AuthViaToken,
AuthType: schema.AuthToken,
AuthSource: -1,
}, nil
}

// Generate a new JWT that can be used for authentication
func (ja *JWTAuthenticator) ProvideJWT(user *User) (string, error) {
func (ja *JWTAuthenticator) ProvideJWT(user *schema.User) (string, error) {

if ja.privateKey == nil {
return "", errors.New("environment variable 'JWT_PRIVATE_KEY' not set")
@ -327,8 +128,12 @@ func (ja *JWTAuthenticator) ProvideJWT(user *User) (string, error) {
"roles": user.Roles,
"iat": now.Unix(),
}
if ja.config != nil && ja.config.MaxAge != 0 {
claims["exp"] = now.Add(time.Duration(ja.config.MaxAge)).Unix()
if config.Keys.JwtConfig.MaxAge != "" {
d, err := time.ParseDuration(config.Keys.JwtConfig.MaxAge)
if err != nil {
return "", errors.New("cannot parse max-age config key")
}
claims["exp"] = now.Add(d).Unix()
}

return jwt.NewWithClaims(jwt.SigningMethodEdDSA, claims).SignedString(ja.privateKey)
219
internal/auth/jwtCookieSession.go
Normal file
@ -0,0 +1,219 @@
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package auth

import (
"crypto/ed25519"
"encoding/base64"
"errors"
"fmt"
"net/http"
"os"

"github.com/ClusterCockpit/cc-backend/internal/config"
"github.com/ClusterCockpit/cc-backend/internal/repository"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"github.com/golang-jwt/jwt/v4"
)

type JWTCookieSessionAuthenticator struct {
publicKey ed25519.PublicKey
privateKey ed25519.PrivateKey
publicKeyCrossLogin ed25519.PublicKey // For accepting externally generated JWTs
}

var _ Authenticator = (*JWTCookieSessionAuthenticator)(nil)

func (ja *JWTCookieSessionAuthenticator) Init() error {
pubKey, privKey := os.Getenv("JWT_PUBLIC_KEY"), os.Getenv("JWT_PRIVATE_KEY")
if pubKey == "" || privKey == "" {
log.Warn("environment variables 'JWT_PUBLIC_KEY' or 'JWT_PRIVATE_KEY' not set (token based authentication will not work)")
return errors.New("environment variables 'JWT_PUBLIC_KEY' or 'JWT_PRIVATE_KEY' not set (token based authentication will not work)")
} else {
bytes, err := base64.StdEncoding.DecodeString(pubKey)
if err != nil {
log.Warn("Could not decode JWT public key")
return err
}
ja.publicKey = ed25519.PublicKey(bytes)
bytes, err = base64.StdEncoding.DecodeString(privKey)
if err != nil {
log.Warn("Could not decode JWT private key")
return err
}
ja.privateKey = ed25519.PrivateKey(bytes)
}

// Look for external public keys
pubKeyCrossLogin, keyFound := os.LookupEnv("CROSS_LOGIN_JWT_PUBLIC_KEY")
if keyFound && pubKeyCrossLogin != "" {
bytes, err := base64.StdEncoding.DecodeString(pubKeyCrossLogin)
if err != nil {
log.Warn("Could not decode cross login JWT public key")
return err
}
ja.publicKeyCrossLogin = ed25519.PublicKey(bytes)
} else {
ja.publicKeyCrossLogin = nil
log.Debug("environment variable 'CROSS_LOGIN_JWT_PUBLIC_KEY' not set (cross login token based authentication will not work)")
return errors.New("environment variable 'CROSS_LOGIN_JWT_PUBLIC_KEY' not set (cross login token based authentication will not work)")
}

jc := config.Keys.JwtConfig
// Warn if other necessary settings are not configured
if jc != nil {
if jc.CookieName == "" {
log.Info("cookieName for JWTs not configured (cross login via JWT cookie will fail)")
return errors.New("cookieName for JWTs not configured (cross login via JWT cookie will fail)")
}
if !jc.ValidateUser {
log.Info("forceJWTValidationViaDatabase not set to true: CC will accept users and roles defined in JWTs regardless of its own database!")
}
if jc.TrustedIssuer == "" {
log.Info("trustedExternalIssuer for JWTs not configured (cross login via JWT cookie will fail)")
return errors.New("trustedExternalIssuer for JWTs not configured (cross login via JWT cookie will fail)")
}
} else {
log.Warn("config for JWTs not configured (cross login via JWT cookie will fail)")
return errors.New("config for JWTs not configured (cross login via JWT cookie will fail)")
}

log.Info("JWT Cookie Session authenticator successfully registered")
return nil
}

func (ja *JWTCookieSessionAuthenticator) CanLogin(
user *schema.User,
username string,
rw http.ResponseWriter,
r *http.Request) (*schema.User, bool) {

jc := config.Keys.JwtConfig
cookieName := ""
if jc.CookieName != "" {
cookieName = jc.CookieName
}

// Try to read the JWT cookie
if cookieName != "" {
jwtCookie, err := r.Cookie(cookieName)

if err == nil && jwtCookie.Value != "" {
return user, true
}
}

return nil, false
}

func (ja *JWTCookieSessionAuthenticator) Login(
user *schema.User,
rw http.ResponseWriter,
r *http.Request) (*schema.User, error) {

jc := config.Keys.JwtConfig
jwtCookie, err := r.Cookie(jc.CookieName)
var rawtoken string

if err == nil && jwtCookie.Value != "" {
rawtoken = jwtCookie.Value
}

token, err := jwt.Parse(rawtoken, func(t *jwt.Token) (interface{}, error) {
if t.Method != jwt.SigningMethodEdDSA {
return nil, errors.New("only Ed25519/EdDSA supported")
}

unvalidatedIssuer, success := t.Claims.(jwt.MapClaims)["iss"].(string)
if success && unvalidatedIssuer == jc.TrustedIssuer {
// The (unvalidated) issuer seems to be the expected one,
// use public cross login key from config
return ja.publicKeyCrossLogin, nil
}

// No cross login key configured or issuer not expected
// Try own key
return ja.publicKey, nil
})
if err != nil {
log.Warn("JWT cookie session: error while parsing token")
return nil, err
}

// Check token validity and extract paypload
if err := token.Claims.Valid(); err != nil {
log.Warn("jwt token claims are not valid")
return nil, err
}

claims := token.Claims.(jwt.MapClaims)
sub, _ := claims["sub"].(string)

var name string
if wrap, ok := claims["name"].(map[string]interface{}); ok {
if vals, ok := wrap["values"].([]interface{}); ok {
if len(vals) != 0 {
name = fmt.Sprintf("%v", vals[0])

for i := 1; i < len(vals); i++ {
name += fmt.Sprintf(" %v", vals[i])
}
}
}
}

var roles []string

if jc.ValidateUser {
// Deny any logins for unknown usernames
if user == nil {
log.Warn("Could not find user from JWT in internal database.")
return nil, errors.New("unknown user")
}

// Take user roles from database instead of trusting the JWT
roles = user.Roles
} else {
// Extract roles from JWT (if present)
if rawroles, ok := claims["roles"].([]interface{}); ok {
for _, rr := range rawroles {
if r, ok := rr.(string); ok {
roles = append(roles, r)
}
}
}
}

// (Ask browser to) Delete JWT cookie
deletedCookie := &http.Cookie{
Name: jc.CookieName,
Value: "",
Path: "/",
MaxAge: -1,
HttpOnly: true,
}
http.SetCookie(rw, deletedCookie)

if user == nil {
projects := make([]string, 0)
user = &schema.User{
Username: sub,
Name: name,
Roles: roles,
Projects: projects,
AuthType: schema.AuthSession,
AuthSource: schema.AuthViaToken,
}

if jc.SyncUserOnLogin {
if err := repository.GetUserRepository().AddUser(user); err != nil {
log.Errorf("Error while adding user '%s' to DB", user.Username)
}
}
}

return user, nil
}
150
internal/auth/jwtSession.go
Normal file
@ -0,0 +1,150 @@
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package auth

import (
"encoding/base64"
"errors"
"fmt"
"net/http"
"os"
"strings"

"github.com/ClusterCockpit/cc-backend/internal/config"
"github.com/ClusterCockpit/cc-backend/internal/repository"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"github.com/golang-jwt/jwt/v4"
)

type JWTSessionAuthenticator struct {
loginTokenKey []byte // HS256 key
}

var _ Authenticator = (*JWTSessionAuthenticator)(nil)

func (ja *JWTSessionAuthenticator) Init() error {
if pubKey := os.Getenv("CROSS_LOGIN_JWT_HS512_KEY"); pubKey != "" {
bytes, err := base64.StdEncoding.DecodeString(pubKey)
if err != nil {
log.Warn("Could not decode cross login JWT HS512 key")
return err
}
ja.loginTokenKey = bytes
}

log.Info("JWT Session authenticator successfully registered")
return nil
}

func (ja *JWTSessionAuthenticator) CanLogin(
user *schema.User,
username string,
rw http.ResponseWriter,
r *http.Request) (*schema.User, bool) {

return user, r.Header.Get("Authorization") != "" ||
r.URL.Query().Get("login-token") != ""
}

func (ja *JWTSessionAuthenticator) Login(
user *schema.User,
rw http.ResponseWriter,
r *http.Request) (*schema.User, error) {

rawtoken := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ")
if rawtoken == "" {
rawtoken = r.URL.Query().Get("login-token")
}

token, err := jwt.Parse(rawtoken, func(t *jwt.Token) (interface{}, error) {
if t.Method == jwt.SigningMethodHS256 || t.Method == jwt.SigningMethodHS512 {
return ja.loginTokenKey, nil
}
return nil, fmt.Errorf("unkown signing method for login token: %s (known: HS256, HS512, EdDSA)", t.Method.Alg())
})
if err != nil {
log.Warn("Error while parsing jwt token")
return nil, err
}

if err = token.Claims.Valid(); err != nil {
log.Warn("jwt token claims are not valid")
return nil, err
}

claims := token.Claims.(jwt.MapClaims)
sub, _ := claims["sub"].(string)

var name string
if wrap, ok := claims["name"].(map[string]interface{}); ok {
if vals, ok := wrap["values"].([]interface{}); ok {
if len(vals) != 0 {
name = fmt.Sprintf("%v", vals[0])

for i := 1; i < len(vals); i++ {
name += fmt.Sprintf(" %v", vals[i])
}
}
}
}

var roles []string

if config.Keys.JwtConfig.ValidateUser {
// Deny any logins for unknown usernames
if user == nil {
log.Warn("Could not find user from JWT in internal database.")
return nil, errors.New("unknown user")
}

// Take user roles from database instead of trusting the JWT
roles = user.Roles
} else {
// Extract roles from JWT (if present)
if rawroles, ok := claims["roles"].([]interface{}); ok {
for _, rr := range rawroles {
if r, ok := rr.(string); ok {
if schema.IsValidRole(r) {
roles = append(roles, r)
}
}
}
}
}

projects := make([]string, 0)
// Java/Grails Issued Token
// if rawprojs, ok := claims["projects"].([]interface{}); ok {
// for _, pp := range rawprojs {
// if p, ok := pp.(string); ok {
// projects = append(projects, p)
// }
// }
// } else if rawprojs, ok := claims["projects"]; ok {
// for _, p := range rawprojs.([]string) {
// projects = append(projects, p)
// }
// }

if user == nil {
user = &schema.User{
Username: sub,
Name: name,
Roles: roles,
Projects: projects,
AuthType: schema.AuthSession,
AuthSource: schema.AuthViaToken,
}

if config.Keys.JwtConfig.SyncUserOnLogin {
if err := repository.GetUserRepository().AddUser(user); err != nil {
log.Errorf("Error while adding user '%s' to DB", user.Username)
}
}
}

return user, nil
}
@ -1,4 +1,4 @@
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
@ -12,35 +12,33 @@ import (
"strings"
"time"

"github.com/ClusterCockpit/cc-backend/internal/config"
"github.com/ClusterCockpit/cc-backend/internal/repository"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"github.com/go-ldap/ldap/v3"
)

type LdapAuthenticator struct {
auth *Authentication
config *schema.LdapConfig
syncPassword string
UserAttr string
}

var _ Authenticator = (*LdapAuthenticator)(nil)

func (la *LdapAuthenticator) Init(
auth *Authentication,
conf interface{}) error {

la.auth = auth
la.config = conf.(*schema.LdapConfig)

func (la *LdapAuthenticator) Init() error {
la.syncPassword = os.Getenv("LDAP_ADMIN_PASSWORD")
if la.syncPassword == "" {
log.Warn("environment variable 'LDAP_ADMIN_PASSWORD' not set (ldap sync will not work)")
}

if la.config != nil && la.config.SyncInterval != "" {
interval, err := time.ParseDuration(la.config.SyncInterval)
lc := config.Keys.LdapConfig

if lc.SyncInterval != "" {
interval, err := time.ParseDuration(lc.SyncInterval)
if err != nil {
log.Warnf("Could not parse duration for sync interval: %v", la.config.SyncInterval)
log.Warnf("Could not parse duration for sync interval: %v",
lc.SyncInterval)
return err
}

@ -59,23 +57,88 @@ func (la *LdapAuthenticator) Init(
log.Print("sync done")
}
}()
} else {
log.Info("LDAP configuration key sync_interval invalid")
}

if lc.UserAttr != "" {
la.UserAttr = lc.UserAttr
} else {
la.UserAttr = "gecos"
}

return nil
}

func (la *LdapAuthenticator) CanLogin(
user *User,
user *schema.User,
username string,
rw http.ResponseWriter,
r *http.Request) bool {
r *http.Request) (*schema.User, bool) {

return user != nil && user.AuthSource == AuthViaLDAP
lc := config.Keys.LdapConfig

if user != nil {
if user.AuthSource == schema.AuthViaLDAP {
return user, true
}
} else {
if lc.SyncUserOnLogin {
l, err := la.getLdapConnection(true)
if err != nil {
log.Error("LDAP connection error")
}
defer l.Close()

// Search for the given username
searchRequest := ldap.NewSearchRequest(
lc.UserBase,
ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false,
fmt.Sprintf("(&%s(uid=%s))", lc.UserFilter, username),
[]string{"dn", "uid", la.UserAttr}, nil)

sr, err := l.Search(searchRequest)
if err != nil {
log.Warn(err)
return nil, false
}

if len(sr.Entries) != 1 {
log.Warn("LDAP: User does not exist or too many entries returned")
return nil, false
}

entry := sr.Entries[0]
name := entry.GetAttributeValue(la.UserAttr)
var roles []string
roles = append(roles, schema.GetRoleString(schema.RoleUser))
projects := make([]string, 0)

user = &schema.User{
Username: username,
Name: name,
Roles: roles,
Projects: projects,
AuthType: schema.AuthSession,
AuthSource: schema.AuthViaLDAP,
}

if err := repository.GetUserRepository().AddUser(user); err != nil {
log.Errorf("User '%s' LDAP: Insert into DB failed", username)
return nil, false
}

return user, true
}
}

return nil, false
}

func (la *LdapAuthenticator) Login(
user *User,
user *schema.User,
rw http.ResponseWriter,
r *http.Request) (*User, error) {
r *http.Request) (*schema.User, error) {

l, err := la.getLdapConnection(false)
if err != nil {
@ -84,42 +147,30 @@ func (la *LdapAuthenticator) Login(
}
defer l.Close()

userDn := strings.Replace(la.config.UserBind, "{username}", user.Username, -1)
userDn := strings.Replace(config.Keys.LdapConfig.UserBind, "{username}", user.Username, -1)
if err := l.Bind(userDn, r.FormValue("password")); err != nil {
log.Errorf("AUTH/LOCAL > Authentication for user %s failed: %v", user.Username, err)
return nil, fmt.Errorf("AUTH/LDAP > Authentication failed")
log.Errorf("AUTH/LDAP > Authentication for user %s failed: %v",
user.Username, err)
return nil, fmt.Errorf("Authentication failed")
}

return user, nil
}

func (la *LdapAuthenticator) Auth(
rw http.ResponseWriter,
r *http.Request) (*User, error) {

return la.auth.AuthViaSession(rw, r)
}

func (la *LdapAuthenticator) Sync() error {

const IN_DB int = 1
const IN_LDAP int = 2
const IN_BOTH int = 3
ur := repository.GetUserRepository()
lc := config.Keys.LdapConfig

users := map[string]int{}
rows, err := la.auth.db.Query(`SELECT username FROM user WHERE user.ldap = 1`)
usernames, err := ur.GetLdapUsernames()
if err != nil {
log.Warn("Error while querying LDAP users")
return err
}

for rows.Next() {
var username string
if err := rows.Scan(&username); err != nil {
log.Warnf("Error while scanning for user '%s'", username)
return err
}

for _, username := range usernames {
users[username] = IN_DB
}

@ -131,8 +182,10 @@ func (la *LdapAuthenticator) Sync() error {
defer l.Close()

ldapResults, err := l.Search(ldap.NewSearchRequest(
la.config.UserBase, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false,
la.config.UserFilter, []string{"dn", "uid", "gecos"}, nil))
lc.UserBase,
ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false,
lc.UserFilter,
[]string{"dn", "uid", la.UserAttr}, nil))
if err != nil {
log.Warn("LDAP search error")
return err
@ -148,25 +201,34 @@ func (la *LdapAuthenticator) Sync() error {
_, ok := users[username]
if !ok {
users[username] = IN_LDAP
newnames[username] = entry.GetAttributeValue("gecos")
newnames[username] = entry.GetAttributeValue(la.UserAttr)
} else {
users[username] = IN_BOTH
}
}

for username, where := range users {
if where == IN_DB && la.config.SyncDelOldUsers {
if where == IN_DB && lc.SyncDelOldUsers {
ur.DelUser(username)
log.Debugf("sync: remove %v (does not show up in LDAP anymore)", username)
if _, err := la.auth.db.Exec(`DELETE FROM user WHERE user.username = ?`, username); err != nil {
log.Errorf("User '%s' not in LDAP anymore: Delete from DB failed", username)
return err
}
} else if where == IN_LDAP {
name := newnames[username]

var roles []string
roles = append(roles, schema.GetRoleString(schema.RoleUser))
projects := make([]string, 0)

user := &schema.User{
Username: username,
Name: name,
Roles: roles,
Projects: projects,
AuthSource: schema.AuthViaLDAP,
}

log.Debugf("sync: add %v (name: %v, roles: [user], ldap: true)", username, name)
if _, err := la.auth.db.Exec(`INSERT INTO user (username, ldap, name, roles) VALUES (?, ?, ?, ?)`,
username, 1, name, "[\""+GetRoleString(RoleUser)+"\"]"); err != nil {
log.Errorf("User '%s' new in LDAP: Insert into DB failed", username)
if err := ur.AddUser(user); err != nil {
log.Errorf("User '%s' LDAP: Insert into DB failed", username)
return err
}
}
@ -175,18 +237,17 @@ func (la *LdapAuthenticator) Sync() error {
return nil
}

// TODO: Add a connection pool or something like
// that so that connections can be reused/cached.
func (la *LdapAuthenticator) getLdapConnection(admin bool) (*ldap.Conn, error) {

conn, err := ldap.DialURL(la.config.Url)
lc := config.Keys.LdapConfig
conn, err := ldap.DialURL(lc.Url)
if err != nil {
log.Warn("LDAP URL dial failed")
return nil, err
}

if admin {
if err := conn.Bind(la.config.SearchDN, la.syncPassword); err != nil {
if err := conn.Bind(lc.SearchDN, la.syncPassword); err != nil {
conn.Close()
log.Warn("LDAP connection bind failed")
return nil, err
@ -9,6 +9,7 @@ import (
"net/http"

"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
"golang.org/x/crypto/bcrypt"
)

@ -18,38 +19,29 @@ type LocalAuthenticator struct {

var _ Authenticator = (*LocalAuthenticator)(nil)

func (la *LocalAuthenticator) Init(
auth *Authentication,
_ interface{}) error {

la.auth = auth
func (la *LocalAuthenticator) Init() error {
return nil
}

func (la *LocalAuthenticator) CanLogin(
user *User,
user *schema.User,
username string,
rw http.ResponseWriter,
r *http.Request) bool {
r *http.Request) (*schema.User, bool) {

return user != nil && user.AuthSource == AuthViaLocalPassword
return user, user != nil && user.AuthSource == schema.AuthViaLocalPassword
}

func (la *LocalAuthenticator) Login(
user *User,
user *schema.User,
rw http.ResponseWriter,
r *http.Request) (*User, error) {
r *http.Request) (*schema.User, error) {

if e := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(r.FormValue("password"))); e != nil {
if e := bcrypt.CompareHashAndPassword([]byte(user.Password),
[]byte(r.FormValue("password"))); e != nil {
log.Errorf("AUTH/LOCAL > Authentication for user %s failed!", user.Username)
return nil, fmt.Errorf("AUTH/LOCAL > Authentication failed")
return nil, fmt.Errorf("Authentication failed")
}

return user, nil
}

func (la *LocalAuthenticator) Auth(
rw http.ResponseWriter,
r *http.Request) (*User, error) {

return la.auth.AuthViaSession(rw, r)
}
@ -1,289 +0,0 @@
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package auth

import (
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"strings"

"github.com/ClusterCockpit/cc-backend/internal/graph/model"
"github.com/ClusterCockpit/cc-backend/pkg/log"
sq "github.com/Masterminds/squirrel"
"github.com/jmoiron/sqlx"
"golang.org/x/crypto/bcrypt"
)

func (auth *Authentication) GetUser(username string) (*User, error) {

user := &User{Username: username}
var hashedPassword, name, rawRoles, email, rawProjects sql.NullString
if err := sq.Select("password", "ldap", "name", "roles", "email", "projects").From("user").
Where("user.username = ?", username).RunWith(auth.db).
QueryRow().Scan(&hashedPassword, &user.AuthSource, &name, &rawRoles, &email, &rawProjects); err != nil {
log.Warnf("Error while querying user '%v' from database", username)
return nil, err
}

user.Password = hashedPassword.String
user.Name = name.String
user.Email = email.String
if rawRoles.Valid {
if err := json.Unmarshal([]byte(rawRoles.String), &user.Roles); err != nil {
log.Warn("Error while unmarshaling raw roles from DB")
return nil, err
}
}
if rawProjects.Valid {
if err := json.Unmarshal([]byte(rawProjects.String), &user.Projects); err != nil {
return nil, err
}
}

return user, nil
}

func (auth *Authentication) AddUser(user *User) error {

rolesJson, _ := json.Marshal(user.Roles)
projectsJson, _ := json.Marshal(user.Projects)

cols := []string{"username", "roles", "projects"}
vals := []interface{}{user.Username, string(rolesJson), string(projectsJson)}

if user.Name != "" {
cols = append(cols, "name")
vals = append(vals, user.Name)
}
if user.Email != "" {
cols = append(cols, "email")
vals = append(vals, user.Email)
}
if user.Password != "" {
password, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)
if err != nil {
log.Error("Error while encrypting new user password")
return err
}
cols = append(cols, "password")
vals = append(vals, string(password))
}

if _, err := sq.Insert("user").Columns(cols...).Values(vals...).RunWith(auth.db).Exec(); err != nil {
log.Errorf("Error while inserting new user '%v' into DB", user.Username)
return err
}

log.Infof("new user %#v created (roles: %s, auth-source: %d, projects: %s)", user.Username, rolesJson, user.AuthSource, projectsJson)
return nil
}

func (auth *Authentication) DelUser(username string) error {

_, err := auth.db.Exec(`DELETE FROM user WHERE user.username = ?`, username)
log.Errorf("Error while deleting user '%s' from DB", username)
return err
}

func (auth *Authentication) ListUsers(specialsOnly bool) ([]*User, error) {

q := sq.Select("username", "name", "email", "roles", "projects").From("user")
if specialsOnly {
q = q.Where("(roles != '[\"user\"]' AND roles != '[]')")
}

rows, err := q.RunWith(auth.db).Query()
if err != nil {
log.Warn("Error while querying user list")
return nil, err
}

users := make([]*User, 0)
defer rows.Close()
for rows.Next() {
rawroles := ""
rawprojects := ""
user := &User{}
var name, email sql.NullString
if err := rows.Scan(&user.Username, &name, &email, &rawroles, &rawprojects); err != nil {
log.Warn("Error while scanning user list")
return nil, err
}

if err := json.Unmarshal([]byte(rawroles), &user.Roles); err != nil {
log.Warn("Error while unmarshaling raw role list")
return nil, err
}

if err := json.Unmarshal([]byte(rawprojects), &user.Projects); err != nil {
return nil, err
}

user.Name = name.String
user.Email = email.String
users = append(users, user)
}
return users, nil
}

func (auth *Authentication) AddRole(
ctx context.Context,
username string,
queryrole string) error {

newRole := strings.ToLower(queryrole)
user, err := auth.GetUser(username)
if err != nil {
log.Warnf("Could not load user '%s'", username)
return err
}

exists, valid := user.HasValidRole(newRole)

if !valid {
return fmt.Errorf("Supplied role is no valid option : %v", newRole)
}
if exists {
return fmt.Errorf("User %v already has role %v", username, newRole)
}

roles, _ := json.Marshal(append(user.Roles, newRole))
if _, err := sq.Update("user").Set("roles", roles).Where("user.username = ?", username).RunWith(auth.db).Exec(); err != nil {
log.Errorf("Error while adding new role for user '%s'", user.Username)
return err
}
return nil
}

func (auth *Authentication) RemoveRole(ctx context.Context, username string, queryrole string) error {
oldRole := strings.ToLower(queryrole)
user, err := auth.GetUser(username)
if err != nil {
log.Warnf("Could not load user '%s'", username)
|
||||
return err
|
||||
}
|
||||
|
||||
exists, valid := user.HasValidRole(oldRole)
|
||||
|
||||
if !valid {
|
||||
return fmt.Errorf("Supplied role is no valid option : %v", oldRole)
|
||||
}
|
||||
if !exists {
|
||||
return fmt.Errorf("Role already deleted for user '%v': %v", username, oldRole)
|
||||
}
|
||||
|
||||
if oldRole == GetRoleString(RoleManager) && len(user.Projects) != 0 {
|
||||
return fmt.Errorf("Cannot remove role 'manager' while user %s still has assigned project(s) : %v", username, user.Projects)
|
||||
}
|
||||
|
||||
var newroles []string
|
||||
for _, r := range user.Roles {
|
||||
if r != oldRole {
|
||||
newroles = append(newroles, r) // Append all roles not matching requested to be deleted role
|
||||
}
|
||||
}
|
||||
|
||||
var mroles, _ = json.Marshal(newroles)
|
||||
if _, err := sq.Update("user").Set("roles", mroles).Where("user.username = ?", username).RunWith(auth.db).Exec(); err != nil {
|
||||
log.Errorf("Error while removing role for user '%s'", user.Username)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (auth *Authentication) AddProject(
|
||||
ctx context.Context,
|
||||
username string,
|
||||
project string) error {
|
||||
|
||||
user, err := auth.GetUser(username)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !user.HasRole(RoleManager) {
|
||||
return fmt.Errorf("user '%s' is not a manager!", username)
|
||||
}
|
||||
|
||||
if user.HasProject(project) {
|
||||
return fmt.Errorf("user '%s' already manages project '%s'", username, project)
|
||||
}
|
||||
|
||||
projects, _ := json.Marshal(append(user.Projects, project))
|
||||
if _, err := sq.Update("user").Set("projects", projects).Where("user.username = ?", username).RunWith(auth.db).Exec(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (auth *Authentication) RemoveProject(ctx context.Context, username string, project string) error {
|
||||
user, err := auth.GetUser(username)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !user.HasRole(RoleManager) {
|
||||
return fmt.Errorf("user '%#v' is not a manager!", username)
|
||||
}
|
||||
|
||||
if !user.HasProject(project) {
|
||||
return fmt.Errorf("user '%#v': Cannot remove project '%#v' - Does not match!", username, project)
|
||||
}
|
||||
|
||||
var exists bool
|
||||
var newprojects []string
|
||||
for _, p := range user.Projects {
|
||||
if p != project {
|
||||
newprojects = append(newprojects, p) // Append all projects not matching requested to be deleted project
|
||||
} else {
|
||||
exists = true
|
||||
}
|
||||
}
|
||||
|
||||
if exists == true {
|
||||
var result interface{}
|
||||
if len(newprojects) == 0 {
|
||||
result = "[]"
|
||||
} else {
|
||||
result, _ = json.Marshal(newprojects)
|
||||
}
|
||||
if _, err := sq.Update("user").Set("projects", result).Where("user.username = ?", username).RunWith(auth.db).Exec(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
} else {
|
||||
return fmt.Errorf("user %s already does not manage project %s", username, project)
|
||||
}
|
||||
}
|
||||
|
||||
func FetchUser(ctx context.Context, db *sqlx.DB, username string) (*model.User, error) {
|
||||
me := GetUser(ctx)
|
||||
if me != nil && me.Username != username && me.HasNotRoles([]Role{RoleAdmin, RoleSupport, RoleManager}) {
|
||||
return nil, errors.New("forbidden")
|
||||
}
|
||||
|
||||
user := &model.User{Username: username}
|
||||
var name, email sql.NullString
|
||||
if err := sq.Select("name", "email").From("user").Where("user.username = ?", username).
|
||||
RunWith(db).QueryRow().Scan(&name, &email); err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
/* This warning will be logged *often* for non-local users, i.e. users mentioned only in job-table or archive, */
|
||||
/* since FetchUser will be called to retrieve full name and mail for every job in query/list */
|
||||
// log.Warnf("User '%s' Not found in DB", username)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
log.Warnf("Error while fetching user '%s'", username)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
user.Name = name.String
|
||||
user.Email = email.String
|
||||
return user, nil
|
||||
}
|
@ -22,26 +22,29 @@ var Keys schema.ProgramConfig = schema.ProgramConfig{
|
||||
Archive: json.RawMessage(`{\"kind\":\"file\",\"path\":\"./var/job-archive\"}`),
|
||||
DisableArchive: false,
|
||||
Validate: false,
|
||||
LdapConfig: nil,
|
||||
SessionMaxAge: "168h",
|
||||
StopJobsExceedingWalltime: 0,
|
||||
ShortRunningJobsDuration: 5 * 60,
|
||||
UiDefaults: map[string]interface{}{
|
||||
"analysis_view_histogramMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"analysis_view_scatterPlotMetrics": [][]string{{"flops_any", "mem_bw"}, {"flops_any", "cpu_load"}, {"cpu_load", "mem_bw"}},
|
||||
"job_view_nodestats_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"job_view_polarPlotMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"job_view_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"plot_general_colorBackground": true,
|
||||
"plot_general_colorscheme": []string{"#00bfff", "#0000ff", "#ff00ff", "#ff0000", "#ff8000", "#ffff00", "#80ff00"},
|
||||
"plot_general_lineWidth": 3,
|
||||
"plot_list_jobsPerPage": 50,
|
||||
"plot_list_selectedMetrics": []string{"cpu_load", "mem_used", "flops_any", "mem_bw"},
|
||||
"plot_view_plotsPerRow": 3,
|
||||
"plot_view_showPolarplot": true,
|
||||
"plot_view_showRoofline": true,
|
||||
"plot_view_showStatTable": true,
|
||||
"system_view_selectedMetric": "cpu_load",
|
||||
"analysis_view_histogramMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"analysis_view_scatterPlotMetrics": [][]string{{"flops_any", "mem_bw"}, {"flops_any", "cpu_load"}, {"cpu_load", "mem_bw"}},
|
||||
"job_view_nodestats_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"job_view_polarPlotMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"job_view_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
|
||||
"plot_general_colorBackground": true,
|
||||
"plot_general_colorscheme": []string{"#00bfff", "#0000ff", "#ff00ff", "#ff0000", "#ff8000", "#ffff00", "#80ff00"},
|
||||
"plot_general_lineWidth": 3,
|
||||
"plot_list_jobsPerPage": 50,
|
||||
"plot_list_selectedMetrics": []string{"cpu_load", "mem_used", "flops_any", "mem_bw"},
|
||||
"plot_view_plotsPerRow": 3,
|
||||
"plot_view_showPolarplot": true,
|
||||
"plot_view_showRoofline": true,
|
||||
"plot_view_showStatTable": true,
|
||||
"system_view_selectedMetric": "cpu_load",
|
||||
"analysis_view_selectedTopEntity": "user",
|
||||
"analysis_view_selectedTopCategory": "totalWalltime",
|
||||
"status_view_selectedTopUserCategory": "totalJobs",
|
||||
"status_view_selectedTopProjectCategory": "totalJobs",
|
||||
},
|
||||
}
|
||||
|
||||
|
File diff suppressed because it is too large
@ -22,8 +22,8 @@ type FloatRange struct {
|
||||
}
|
||||
|
||||
type Footprints struct {
|
||||
Nodehours []schema.Float `json:"nodehours"`
|
||||
Metrics []*MetricFootprints `json:"metrics"`
|
||||
TimeWeights *TimeWeights `json:"timeWeights"`
|
||||
Metrics []*MetricFootprints `json:"metrics"`
|
||||
}
|
||||
|
||||
type HistoPoint struct {
|
||||
@ -37,27 +37,27 @@ type IntRangeOutput struct {
|
||||
}
|
||||
|
||||
type JobFilter struct {
|
||||
Tags []string `json:"tags"`
|
||||
JobID *StringInput `json:"jobId"`
|
||||
ArrayJobID *int `json:"arrayJobId"`
|
||||
User *StringInput `json:"user"`
|
||||
Project *StringInput `json:"project"`
|
||||
JobName *StringInput `json:"jobName"`
|
||||
Cluster *StringInput `json:"cluster"`
|
||||
Partition *StringInput `json:"partition"`
|
||||
Duration *schema.IntRange `json:"duration"`
|
||||
MinRunningFor *int `json:"minRunningFor"`
|
||||
NumNodes *schema.IntRange `json:"numNodes"`
|
||||
NumAccelerators *schema.IntRange `json:"numAccelerators"`
|
||||
NumHWThreads *schema.IntRange `json:"numHWThreads"`
|
||||
StartTime *schema.TimeRange `json:"startTime"`
|
||||
State []schema.JobState `json:"state"`
|
||||
FlopsAnyAvg *FloatRange `json:"flopsAnyAvg"`
|
||||
MemBwAvg *FloatRange `json:"memBwAvg"`
|
||||
LoadAvg *FloatRange `json:"loadAvg"`
|
||||
MemUsedMax *FloatRange `json:"memUsedMax"`
|
||||
Exclusive *int `json:"exclusive"`
|
||||
Node *StringInput `json:"node"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
JobID *StringInput `json:"jobId,omitempty"`
|
||||
ArrayJobID *int `json:"arrayJobId,omitempty"`
|
||||
User *StringInput `json:"user,omitempty"`
|
||||
Project *StringInput `json:"project,omitempty"`
|
||||
JobName *StringInput `json:"jobName,omitempty"`
|
||||
Cluster *StringInput `json:"cluster,omitempty"`
|
||||
Partition *StringInput `json:"partition,omitempty"`
|
||||
Duration *schema.IntRange `json:"duration,omitempty"`
|
||||
MinRunningFor *int `json:"minRunningFor,omitempty"`
|
||||
NumNodes *schema.IntRange `json:"numNodes,omitempty"`
|
||||
NumAccelerators *schema.IntRange `json:"numAccelerators,omitempty"`
|
||||
NumHWThreads *schema.IntRange `json:"numHWThreads,omitempty"`
|
||||
StartTime *schema.TimeRange `json:"startTime,omitempty"`
|
||||
State []schema.JobState `json:"state,omitempty"`
|
||||
FlopsAnyAvg *FloatRange `json:"flopsAnyAvg,omitempty"`
|
||||
MemBwAvg *FloatRange `json:"memBwAvg,omitempty"`
|
||||
LoadAvg *FloatRange `json:"loadAvg,omitempty"`
|
||||
MemUsedMax *FloatRange `json:"memUsedMax,omitempty"`
|
||||
Exclusive *int `json:"exclusive,omitempty"`
|
||||
Node *StringInput `json:"node,omitempty"`
|
||||
}
|
||||
|
||||
type JobLink struct {
|
||||
@ -66,9 +66,9 @@ type JobLink struct {
|
||||
}
|
||||
|
||||
type JobLinkResultList struct {
|
||||
ListQuery *string `json:"listQuery"`
|
||||
ListQuery *string `json:"listQuery,omitempty"`
|
||||
Items []*JobLink `json:"items"`
|
||||
Count *int `json:"count"`
|
||||
Count *int `json:"count,omitempty"`
|
||||
}
|
||||
|
||||
type JobMetricWithName struct {
|
||||
@ -79,9 +79,9 @@ type JobMetricWithName struct {
|
||||
|
||||
type JobResultList struct {
|
||||
Items []*schema.Job `json:"items"`
|
||||
Offset *int `json:"offset"`
|
||||
Limit *int `json:"limit"`
|
||||
Count *int `json:"count"`
|
||||
Offset *int `json:"offset,omitempty"`
|
||||
Limit *int `json:"limit,omitempty"`
|
||||
Count *int `json:"count,omitempty"`
|
||||
}
|
||||
|
||||
type JobsStatistics struct {
|
||||
@ -91,11 +91,16 @@ type JobsStatistics struct {
|
||||
RunningJobs int `json:"runningJobs"`
|
||||
ShortJobs int `json:"shortJobs"`
|
||||
TotalWalltime int `json:"totalWalltime"`
|
||||
TotalNodes int `json:"totalNodes"`
|
||||
TotalNodeHours int `json:"totalNodeHours"`
|
||||
TotalCores int `json:"totalCores"`
|
||||
TotalCoreHours int `json:"totalCoreHours"`
|
||||
TotalAccs int `json:"totalAccs"`
|
||||
TotalAccHours int `json:"totalAccHours"`
|
||||
HistDuration []*HistoPoint `json:"histDuration"`
|
||||
HistNumNodes []*HistoPoint `json:"histNumNodes"`
|
||||
HistNumCores []*HistoPoint `json:"histNumCores"`
|
||||
HistNumAccs []*HistoPoint `json:"histNumAccs"`
|
||||
}
|
||||
|
||||
type MetricFootprints struct {
|
||||
@ -120,12 +125,12 @@ type PageRequest struct {
|
||||
}
|
||||
|
||||
type StringInput struct {
|
||||
Eq *string `json:"eq"`
|
||||
Neq *string `json:"neq"`
|
||||
Contains *string `json:"contains"`
|
||||
StartsWith *string `json:"startsWith"`
|
||||
EndsWith *string `json:"endsWith"`
|
||||
In []string `json:"in"`
|
||||
Eq *string `json:"eq,omitempty"`
|
||||
Neq *string `json:"neq,omitempty"`
|
||||
Contains *string `json:"contains,omitempty"`
|
||||
StartsWith *string `json:"startsWith,omitempty"`
|
||||
EndsWith *string `json:"endsWith,omitempty"`
|
||||
In []string `json:"in,omitempty"`
|
||||
}
|
||||
|
||||
type TimeRangeOutput struct {
|
||||
@ -133,6 +138,12 @@ type TimeRangeOutput struct {
|
||||
To time.Time `json:"to"`
|
||||
}
|
||||
|
||||
type TimeWeights struct {
|
||||
NodeHours []schema.Float `json:"nodeHours"`
|
||||
AccHours []schema.Float `json:"accHours"`
|
||||
CoreHours []schema.Float `json:"coreHours"`
|
||||
}
|
||||
|
||||
type User struct {
|
||||
Username string `json:"username"`
|
||||
Name string `json:"name"`
|
||||
@ -182,6 +193,59 @@ func (e Aggregate) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type SortByAggregate string
|
||||
|
||||
const (
|
||||
SortByAggregateTotalwalltime SortByAggregate = "TOTALWALLTIME"
|
||||
SortByAggregateTotaljobs SortByAggregate = "TOTALJOBS"
|
||||
SortByAggregateTotalnodes SortByAggregate = "TOTALNODES"
|
||||
SortByAggregateTotalnodehours SortByAggregate = "TOTALNODEHOURS"
|
||||
SortByAggregateTotalcores SortByAggregate = "TOTALCORES"
|
||||
SortByAggregateTotalcorehours SortByAggregate = "TOTALCOREHOURS"
|
||||
SortByAggregateTotalaccs SortByAggregate = "TOTALACCS"
|
||||
SortByAggregateTotalacchours SortByAggregate = "TOTALACCHOURS"
|
||||
)
|
||||
|
||||
var AllSortByAggregate = []SortByAggregate{
|
||||
SortByAggregateTotalwalltime,
|
||||
SortByAggregateTotaljobs,
|
||||
SortByAggregateTotalnodes,
|
||||
SortByAggregateTotalnodehours,
|
||||
SortByAggregateTotalcores,
|
||||
SortByAggregateTotalcorehours,
|
||||
SortByAggregateTotalaccs,
|
||||
SortByAggregateTotalacchours,
|
||||
}
|
||||
|
||||
func (e SortByAggregate) IsValid() bool {
|
||||
switch e {
|
||||
case SortByAggregateTotalwalltime, SortByAggregateTotaljobs, SortByAggregateTotalnodes, SortByAggregateTotalnodehours, SortByAggregateTotalcores, SortByAggregateTotalcorehours, SortByAggregateTotalaccs, SortByAggregateTotalacchours:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e SortByAggregate) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *SortByAggregate) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = SortByAggregate(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid SortByAggregate", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e SortByAggregate) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type SortDirectionEnum string
|
||||
|
||||
const (
|
||||
@ -222,44 +286,3 @@ func (e *SortDirectionEnum) UnmarshalGQL(v interface{}) error {
|
||||
func (e SortDirectionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type Weights string
|
||||
|
||||
const (
|
||||
WeightsNodeCount Weights = "NODE_COUNT"
|
||||
WeightsNodeHours Weights = "NODE_HOURS"
|
||||
)
|
||||
|
||||
var AllWeights = []Weights{
|
||||
WeightsNodeCount,
|
||||
WeightsNodeHours,
|
||||
}
|
||||
|
||||
func (e Weights) IsValid() bool {
|
||||
switch e {
|
||||
case WeightsNodeCount, WeightsNodeHours:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e Weights) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *Weights) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = Weights(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid Weights", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e Weights) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
@ -2,7 +2,7 @@ package graph
|
||||
|
||||
// This file will be automatically regenerated based on the schema, any resolver implementations
|
||||
// will be copied through when generating and any unknown code will be moved to the end.
|
||||
// Code generated by github.com/99designs/gqlgen version v0.17.24
|
||||
// Code generated by github.com/99designs/gqlgen version v0.17.36
|
||||
|
||||
import (
|
||||
"context"
|
||||
@ -11,7 +11,6 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/generated"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/metricdata"
|
||||
@ -51,7 +50,7 @@ func (r *jobResolver) MetaData(ctx context.Context, obj *schema.Job) (interface{
|
||||
|
||||
// UserData is the resolver for the userData field.
|
||||
func (r *jobResolver) UserData(ctx context.Context, obj *schema.Job) (*model.User, error) {
|
||||
return auth.FetchUser(ctx, r.DB, obj.User)
|
||||
return repository.GetUserRepository().FetchUserInCtx(ctx, obj.User)
|
||||
}
|
||||
|
||||
// CreateTag is the resolver for the createTag field.
|
||||
@ -122,7 +121,7 @@ func (r *mutationResolver) RemoveTagsFromJob(ctx context.Context, job string, ta
|
||||
|
||||
// UpdateConfiguration is the resolver for the updateConfiguration field.
|
||||
func (r *mutationResolver) UpdateConfiguration(ctx context.Context, name string, value string) (*string, error) {
|
||||
if err := repository.GetUserCfgRepo().UpdateConfig(name, value, auth.GetUser(ctx)); err != nil {
|
||||
if err := repository.GetUserCfgRepo().UpdateConfig(name, value, repository.GetUserFromContext(ctx)); err != nil {
|
||||
log.Warn("Error while updating user config")
|
||||
return nil, err
|
||||
}
|
||||
@ -142,7 +141,7 @@ func (r *queryResolver) Tags(ctx context.Context) ([]*schema.Tag, error) {
|
||||
|
||||
// User is the resolver for the user field.
|
||||
func (r *queryResolver) User(ctx context.Context, username string) (*model.User, error) {
|
||||
return auth.FetchUser(ctx, r.DB, username)
|
||||
return repository.GetUserRepository().FetchUserInCtx(ctx, username)
|
||||
}
|
||||
|
||||
// AllocatedNodes is the resolver for the allocatedNodes field.
|
||||
@ -178,7 +177,9 @@ func (r *queryResolver) Job(ctx context.Context, id string) (*schema.Job, error)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if user := auth.GetUser(ctx); user != nil && job.User != user.Username && user.HasNotRoles([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleManager}) {
|
||||
if user := repository.GetUserFromContext(ctx); user != nil &&
|
||||
job.User != user.Username &&
|
||||
user.HasNotRoles([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
return nil, errors.New("you are not allowed to see this job")
|
||||
}
|
||||
|
||||
@ -243,34 +244,34 @@ func (r *queryResolver) Jobs(ctx context.Context, filter []*model.JobFilter, pag
|
||||
}
|
||||
|
||||
// JobsStatistics is the resolver for the jobsStatistics field.
|
||||
func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
|
||||
func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
|
||||
var err error
|
||||
var stats []*model.JobsStatistics
|
||||
|
||||
if requireField(ctx, "totalJobs") {
|
||||
if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
|
||||
requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
|
||||
if groupBy == nil {
|
||||
stats, err = r.Repo.JobsStats(ctx, filter)
|
||||
} else {
|
||||
stats, err = r.Repo.JobsStatsGrouped(ctx, filter, groupBy)
|
||||
stats, err = r.Repo.JobsStatsGrouped(ctx, filter, page, sortBy, groupBy)
|
||||
}
|
||||
} else {
|
||||
stats = make([]*model.JobsStatistics, 0, 1)
|
||||
stats = append(stats,
|
||||
&model.JobsStatistics{})
|
||||
stats = append(stats, &model.JobsStatistics{})
|
||||
}
|
||||
|
||||
if groupBy != nil {
|
||||
if requireField(ctx, "shortJobs") {
|
||||
stats, err = r.Repo.AddJobCountGrouped(ctx, filter, groupBy, stats, "short")
|
||||
}
|
||||
if requireField(ctx, "RunningJobs") {
|
||||
if requireField(ctx, "runningJobs") {
|
||||
stats, err = r.Repo.AddJobCountGrouped(ctx, filter, groupBy, stats, "running")
|
||||
}
|
||||
} else {
|
||||
if requireField(ctx, "shortJobs") {
|
||||
stats, err = r.Repo.AddJobCount(ctx, filter, stats, "short")
|
||||
}
|
||||
if requireField(ctx, "RunningJobs") {
|
||||
if requireField(ctx, "runningJobs") {
|
||||
stats, err = r.Repo.AddJobCount(ctx, filter, stats, "running")
|
||||
}
|
||||
}
|
||||
@ -279,7 +280,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") {
|
||||
if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") || requireField(ctx, "histNumCores") || requireField(ctx, "histNumAccs") {
|
||||
if groupBy == nil {
|
||||
stats[0], err = r.Repo.AddHistograms(ctx, filter, stats[0])
|
||||
if err != nil {
|
||||
@ -293,24 +294,6 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
// JobsCount is the resolver for the jobsCount field.
|
||||
func (r *queryResolver) JobsCount(ctx context.Context, filter []*model.JobFilter, groupBy model.Aggregate, weight *model.Weights, limit *int) ([]*model.Count, error) {
|
||||
counts, err := r.Repo.CountGroupedJobs(ctx, groupBy, filter, weight, limit)
|
||||
if err != nil {
|
||||
log.Warn("Error while counting grouped jobs")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res := make([]*model.Count, 0, len(counts))
|
||||
for name, count := range counts {
|
||||
res = append(res, &model.Count{
|
||||
Name: name,
|
||||
Count: count,
|
||||
})
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// RooflineHeatmap is the resolver for the rooflineHeatmap field.
|
||||
func (r *queryResolver) RooflineHeatmap(ctx context.Context, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) ([][]float64, error) {
|
||||
return r.rooflineHeatmap(ctx, filter, rows, cols, minX, minY, maxX, maxY)
|
||||
@ -318,8 +301,8 @@ func (r *queryResolver) RooflineHeatmap(ctx context.Context, filter []*model.Job
|
||||
|
||||
// NodeMetrics is the resolver for the nodeMetrics field.
|
||||
func (r *queryResolver) NodeMetrics(ctx context.Context, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) ([]*model.NodeMetrics, error) {
|
||||
user := auth.GetUser(ctx)
|
||||
if user != nil && !user.HasRole(auth.RoleAdmin) {
|
||||
user := repository.GetUserFromContext(ctx)
|
||||
if user != nil && !user.HasRole(schema.RoleAdmin) {
|
||||
return nil, errors.New("you need to be an administrator for this query")
|
||||
}
|
||||
|
||||
|
@ -6,7 +6,6 @@ package graph
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
|
||||
@ -15,6 +14,7 @@ import (
|
||||
"github.com/ClusterCockpit/cc-backend/internal/metricdata"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
// "github.com/ClusterCockpit/cc-backend/pkg/archive"
|
||||
)
|
||||
|
||||
const MAX_JOBS_FOR_ANALYSIS = 500
|
||||
@ -32,7 +32,7 @@ func (r *queryResolver) rooflineHeatmap(
|
||||
return nil, err
|
||||
}
|
||||
if len(jobs) > MAX_JOBS_FOR_ANALYSIS {
|
||||
return nil, fmt.Errorf("GRAPH/STATS > too many jobs matched (max: %d)", MAX_JOBS_FOR_ANALYSIS)
|
||||
return nil, fmt.Errorf("GRAPH/UTIL > too many jobs matched (max: %d)", MAX_JOBS_FOR_ANALYSIS)
|
||||
}
|
||||
|
||||
fcols, frows := float64(cols), float64(rows)
|
||||
@ -49,20 +49,24 @@ func (r *queryResolver) rooflineHeatmap(
|
||||
|
||||
jobdata, err := metricdata.LoadData(job, []string{"flops_any", "mem_bw"}, []schema.MetricScope{schema.MetricScopeNode}, ctx)
|
||||
if err != nil {
|
||||
log.Error("Error while loading metrics for roofline")
|
||||
log.Errorf("Error while loading roofline metrics for job %d", job.ID)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
flops_, membw_ := jobdata["flops_any"], jobdata["mem_bw"]
|
||||
if flops_ == nil && membw_ == nil {
|
||||
return nil, fmt.Errorf("GRAPH/STATS > 'flops_any' or 'mem_bw' missing for job %d", job.ID)
|
||||
log.Infof("rooflineHeatmap(): 'flops_any' or 'mem_bw' missing for job %d", job.ID)
|
||||
continue
|
||||
// return nil, fmt.Errorf("GRAPH/UTIL > 'flops_any' or 'mem_bw' missing for job %d", job.ID)
|
||||
}
|
||||
|
||||
flops, ok1 := flops_["node"]
|
||||
membw, ok2 := membw_["node"]
|
||||
if !ok1 || !ok2 {
|
||||
log.Info("rooflineHeatmap() query not implemented for where flops_any or mem_bw not available at 'node' level")
|
||||
continue
|
||||
// TODO/FIXME:
|
||||
return nil, errors.New("GRAPH/STATS > todo: rooflineHeatmap() query not implemented for where flops_any or mem_bw not available at 'node' level")
|
||||
// return nil, errors.New("GRAPH/UTIL > todo: rooflineHeatmap() query not implemented for where flops_any or mem_bw not available at 'node' level")
|
||||
}
|
||||
|
||||
for n := 0; n < len(flops.Series); n++ {
|
||||
@ -98,7 +102,7 @@ func (r *queryResolver) jobsFootprints(ctx context.Context, filter []*model.JobF
|
||||
return nil, err
|
||||
}
|
||||
if len(jobs) > MAX_JOBS_FOR_ANALYSIS {
|
||||
return nil, fmt.Errorf("GRAPH/STATS > too many jobs matched (max: %d)", MAX_JOBS_FOR_ANALYSIS)
|
||||
return nil, fmt.Errorf("GRAPH/UTIL > too many jobs matched (max: %d)", MAX_JOBS_FOR_ANALYSIS)
|
||||
}
|
||||
|
||||
avgs := make([][]schema.Float, len(metrics))
|
||||
@ -106,7 +110,11 @@ func (r *queryResolver) jobsFootprints(ctx context.Context, filter []*model.JobF
|
||||
avgs[i] = make([]schema.Float, 0, len(jobs))
|
||||
}
|
||||
|
||||
nodehours := make([]schema.Float, 0, len(jobs))
|
||||
timeweights := new(model.TimeWeights)
|
||||
timeweights.NodeHours = make([]schema.Float, 0, len(jobs))
|
||||
timeweights.AccHours = make([]schema.Float, 0, len(jobs))
|
||||
timeweights.CoreHours = make([]schema.Float, 0, len(jobs))
|
||||
|
||||
for _, job := range jobs {
|
||||
if job.MonitoringStatus == schema.MonitoringStatusDisabled || job.MonitoringStatus == schema.MonitoringStatusArchivingFailed {
|
||||
continue
|
||||
@ -117,7 +125,18 @@ func (r *queryResolver) jobsFootprints(ctx context.Context, filter []*model.JobF
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nodehours = append(nodehours, schema.Float(float64(job.Duration)/60.0*float64(job.NumNodes)))
|
||||
// #166 collect arrays: Null values or no null values?
|
||||
timeweights.NodeHours = append(timeweights.NodeHours, schema.Float(float64(job.Duration)/60.0*float64(job.NumNodes)))
|
||||
if job.NumAcc > 0 {
|
||||
timeweights.AccHours = append(timeweights.AccHours, schema.Float(float64(job.Duration)/60.0*float64(job.NumAcc)))
|
||||
} else {
|
||||
timeweights.AccHours = append(timeweights.AccHours, schema.Float(1.0))
|
||||
}
|
||||
if job.NumHWThreads > 0 {
|
||||
timeweights.CoreHours = append(timeweights.CoreHours, schema.Float(float64(job.Duration)/60.0*float64(job.NumHWThreads))) // SQLite HWThreads == Cores; numCoresForJob(job)
|
||||
} else {
|
||||
timeweights.CoreHours = append(timeweights.CoreHours, schema.Float(1.0))
|
||||
}
|
||||
}
|
||||
|
||||
res := make([]*model.MetricFootprints, len(avgs))
|
||||
@ -129,11 +148,34 @@ func (r *queryResolver) jobsFootprints(ctx context.Context, filter []*model.JobF
|
||||
}
|
||||
|
||||
return &model.Footprints{
|
||||
Nodehours: nodehours,
|
||||
Metrics: res,
|
||||
TimeWeights: timeweights,
|
||||
Metrics: res,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// func numCoresForJob(job *schema.Job) (numCores int) {
|
||||
|
||||
// subcluster, scerr := archive.GetSubCluster(job.Cluster, job.SubCluster)
|
||||
// if scerr != nil {
|
||||
// return 1
|
||||
// }
|
||||
|
||||
// totalJobCores := 0
|
||||
// topology := subcluster.Topology
|
||||
|
||||
// for _, host := range job.Resources {
|
||||
// hwthreads := host.HWThreads
|
||||
// if hwthreads == nil {
|
||||
// hwthreads = topology.Node
|
||||
// }
|
||||
|
||||
// hostCores, _ := topology.GetCoresFromHWThreads(hwthreads)
|
||||
// totalJobCores += len(hostCores)
|
||||
// }
|
||||
|
||||
// return totalJobCores
|
||||
// }
|
||||
|
||||
func requireField(ctx context.Context, name string) bool {
|
||||
fields := graphql.CollectAllFields(ctx)
|
||||
|
||||
|
@ -42,6 +42,9 @@ func setup(t *testing.T) *repository.JobRepository {
"kind": "file",
"path": "./var/job-archive"
},
"jwts": {
"max-age": "2m"
},
"clusters": [
{
"name": "testcluster",
@ -506,7 +506,7 @@ func (ccms *CCMetricStore) LoadStats(
metrics []string,
ctx context.Context) (map[string]map[string]schema.MetricStatistics, error) {

queries, _, err := ccms.buildQueries(job, metrics, []schema.MetricScope{schema.MetricScopeNode})
queries, _, err := ccms.buildQueries(job, metrics, []schema.MetricScope{schema.MetricScopeNode}) // #166 Add scope here for analysis view accelerator normalization?
if err != nil {
log.Warn("Error while building query")
return nil, err
@ -533,7 +533,9 @@ func (ccms *CCMetricStore) LoadStats(
metric := ccms.toLocalName(query.Metric)
data := res[0]
if data.Error != nil {
return nil, fmt.Errorf("METRICDATA/CCMS > fetching %s for node %s failed: %s", metric, query.Hostname, *data.Error)
log.Infof("fetching %s for node %s failed: %s", metric, query.Hostname, *data.Error)
continue
// return nil, fmt.Errorf("METRICDATA/CCMS > fetching %s for node %s failed: %s", metric, query.Hostname, *data.Error)
}

metricdata, ok := stats[metric]
@ -543,7 +545,9 @@ func (ccms *CCMetricStore) LoadStats(
}

if data.Avg.IsNaN() || data.Min.IsNaN() || data.Max.IsNaN() {
return nil, fmt.Errorf("METRICDATA/CCMS > fetching %s for node %s failed: %s", metric, query.Hostname, "avg/min/max is NaN")
log.Infof("fetching %s for node %s failed: one of avg/min/max is NaN", metric, query.Hostname)
continue
// return nil, fmt.Errorf("METRICDATA/CCMS > fetching %s for node %s failed: %s", metric, query.Hostname, "avg/min/max is NaN")
}

metricdata[query.Hostname] = schema.MetricStatistics{
@ -182,7 +182,7 @@ func LoadAverages(
ctx context.Context) error {

if job.State != schema.JobStateRunning && useArchive {
return archive.LoadAveragesFromArchive(job, metrics, data)
return archive.LoadAveragesFromArchive(job, metrics, data) // #166 change also here?
}

repo, ok := metricDataRepos[job.Cluster]
@ -190,7 +190,7 @@ func LoadAverages(
return fmt.Errorf("METRICDATA/METRICDATA > no metric data repository configured for '%s'", job.Cluster)
}

stats, err := repo.LoadStats(job, metrics, ctx)
stats, err := repo.LoadStats(job, metrics, ctx) // #166 how to handle stats for acc normalization?
if err != nil {
log.Errorf("Error while loading statistics for job %v (User %v, Project %v)", job.JobID, job.User, job.Project)
return err
@ -326,7 +326,6 @@ func (pdb *PrometheusDataRepository) LoadData(
Timestep: metricConfig.Timestep,
Series: make([]schema.Series, 0),
}
jobData[metric][scope] = jobMetric
}
step := int64(metricConfig.Timestep)
steps := int64(to.Sub(from).Seconds()) / step
@ -335,6 +334,10 @@ func (pdb *PrometheusDataRepository) LoadData(
jobMetric.Series = append(jobMetric.Series,
pdb.RowToSeries(from, step, steps, row))
}
// only add metric if at least one host returned data
if !ok && len(jobMetric.Series) > 0{
jobData[metric][scope] = jobMetric
}
// sort by hostname to get uniform coloring
sort.Slice(jobMetric.Series, func(i, j int) bool {
return (jobMetric.Series[i].Hostname < jobMetric.Series[j].Hostname)
@ -14,7 +14,6 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/metricdata"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
@ -456,69 +455,6 @@ func (r *JobRepository) DeleteJobById(id int64) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Use node hours instead: SELECT job.user, sum(job.num_nodes * (CASE WHEN job.job_state = "running" THEN CAST(strftime('%s', 'now') AS INTEGER) - job.start_time ELSE job.duration END)) as x FROM job GROUP BY user ORDER BY x DESC;
|
||||
func (r *JobRepository) CountGroupedJobs(
|
||||
ctx context.Context,
|
||||
aggreg model.Aggregate,
|
||||
filters []*model.JobFilter,
|
||||
weight *model.Weights,
|
||||
limit *int) (map[string]int, error) {
|
||||
|
||||
start := time.Now()
|
||||
if !aggreg.IsValid() {
|
||||
return nil, errors.New("invalid aggregate")
|
||||
}
|
||||
|
||||
runner := (sq.BaseRunner)(r.stmtCache)
|
||||
count := "count(*) as count"
|
||||
if weight != nil {
|
||||
switch *weight {
|
||||
case model.WeightsNodeCount:
|
||||
count = "sum(job.num_nodes) as count"
|
||||
case model.WeightsNodeHours:
|
||||
now := time.Now().Unix()
|
||||
count = fmt.Sprintf(`sum(job.num_nodes * (CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END)) as count`, now)
|
||||
runner = r.DB
|
||||
default:
|
||||
log.Debugf("CountGroupedJobs() Weight %v unknown.", *weight)
|
||||
}
|
||||
}
|
||||
|
||||
q, qerr := SecurityCheck(ctx, sq.Select("job."+string(aggreg), count).From("job").GroupBy("job."+string(aggreg)).OrderBy("count DESC"))
|
||||
|
||||
if qerr != nil {
|
||||
return nil, qerr
|
||||
}
|
||||
|
||||
for _, f := range filters {
|
||||
q = BuildWhereClause(f, q)
|
||||
}
|
||||
if limit != nil {
|
||||
q = q.Limit(uint64(*limit))
|
||||
}
|
||||
|
||||
counts := map[string]int{}
|
||||
rows, err := q.RunWith(runner).Query()
|
||||
if err != nil {
|
||||
log.Error("Error while running query")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
var group string
|
||||
var count int
|
||||
if err := rows.Scan(&group, &count); err != nil {
|
||||
log.Warn("Error while scanning rows")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
counts[group] = count
|
||||
}
|
||||
|
||||
log.Debugf("Timer CountGroupedJobs %s", time.Since(start))
|
||||
return counts, nil
|
||||
}
|
||||
|
||||
func (r *JobRepository) UpdateMonitoringStatus(job int64, monitoringStatus int32) (err error) {
|
||||
stmt := sq.Update("job").
|
||||
Set("monitoring_status", monitoringStatus).
|
||||
@ -615,7 +551,7 @@ func (r *JobRepository) WaitForArchiving() {
|
||||
r.archivePending.Wait()
|
||||
}
|
||||
|
||||
func (r *JobRepository) FindUserOrProjectOrJobname(user *auth.User, searchterm string) (jobid string, username string, project string, jobname string) {
|
||||
func (r *JobRepository) FindUserOrProjectOrJobname(user *schema.User, searchterm string) (jobid string, username string, project string, jobname string) {
|
||||
if _, err := strconv.Atoi(searchterm); err == nil { // Return empty on successful conversion: parent method will redirect for integer jobId
|
||||
return searchterm, "", "", ""
|
||||
} else { // Has to have letters and logged-in user for other guesses
|
||||
@ -644,14 +580,14 @@ func (r *JobRepository) FindUserOrProjectOrJobname(user *auth.User, searchterm s
|
||||
var ErrNotFound = errors.New("no such jobname, project or user")
|
||||
var ErrForbidden = errors.New("not authorized")
|
||||
|
||||
func (r *JobRepository) FindColumnValue(user *auth.User, searchterm string, table string, selectColumn string, whereColumn string, isLike bool) (result string, err error) {
|
||||
func (r *JobRepository) FindColumnValue(user *schema.User, searchterm string, table string, selectColumn string, whereColumn string, isLike bool) (result string, err error) {
|
||||
compareStr := " = ?"
|
||||
query := searchterm
|
||||
if isLike {
|
||||
compareStr = " LIKE ?"
|
||||
query = "%" + searchterm + "%"
|
||||
}
|
||||
if user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleManager}) {
|
||||
if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
theQuery := sq.Select(table+"."+selectColumn).Distinct().From(table).
|
||||
Where(table+"."+whereColumn+compareStr, query)
|
||||
|
||||
@ -676,9 +612,9 @@ func (r *JobRepository) FindColumnValue(user *auth.User, searchterm string, tabl
|
||||
}
|
||||
}
|
||||
|
||||
func (r *JobRepository) FindColumnValues(user *auth.User, query string, table string, selectColumn string, whereColumn string) (results []string, err error) {
|
||||
func (r *JobRepository) FindColumnValues(user *schema.User, query string, table string, selectColumn string, whereColumn string) (results []string, err error) {
|
||||
emptyResult := make([]string, 0)
|
||||
if user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleManager}) {
|
||||
if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
rows, err := sq.Select(table+"."+selectColumn).Distinct().From(table).
|
||||
Where(table+"."+whereColumn+" LIKE ?", fmt.Sprint("%", query, "%")).
|
||||
RunWith(r.stmtCache).Query()
|
||||
|
@ -12,20 +12,23 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
sq "github.com/Masterminds/squirrel"
|
||||
)
|
||||
|
||||
// SecurityCheck-less, private: Returns a list of jobs matching the provided filters. page and order are optional-
|
||||
func (r *JobRepository) queryJobs(
|
||||
query sq.SelectBuilder,
|
||||
func (r *JobRepository) QueryJobs(
|
||||
ctx context.Context,
|
||||
filters []*model.JobFilter,
|
||||
page *model.PageRequest,
|
||||
order *model.OrderByInput) ([]*schema.Job, error) {
|
||||
|
||||
query, qerr := SecurityCheck(ctx, sq.Select(jobColumns...).From("job"))
|
||||
if qerr != nil {
|
||||
return nil, qerr
|
||||
}
|
||||
|
||||
if order != nil {
|
||||
field := toSnakeCase(order.Field)
|
||||
|
||||
@ -68,34 +71,15 @@ func (r *JobRepository) queryJobs(
|
||||
return jobs, nil
|
||||
}
|
||||
|
||||
// testFunction for queryJobs
|
||||
func (r *JobRepository) testQueryJobs(
|
||||
filters []*model.JobFilter,
|
||||
page *model.PageRequest,
|
||||
order *model.OrderByInput) ([]*schema.Job, error) {
|
||||
|
||||
return r.queryJobs(sq.Select(jobColumns...).From("job"), filters, page, order)
|
||||
}
|
||||
|
||||
// Public function with added securityCheck, calls private queryJobs function above
|
||||
func (r *JobRepository) QueryJobs(
|
||||
func (r *JobRepository) CountJobs(
|
||||
ctx context.Context,
|
||||
filters []*model.JobFilter,
|
||||
page *model.PageRequest,
|
||||
order *model.OrderByInput) ([]*schema.Job, error) {
|
||||
|
||||
query, qerr := SecurityCheck(ctx, sq.Select(jobColumns...).From("job"))
|
||||
if qerr != nil {
|
||||
return nil, qerr
|
||||
}
|
||||
|
||||
return r.queryJobs(query, filters, page, order)
|
||||
}
|
||||
|
||||
// SecurityCheck-less, private: Returns the number of jobs matching the filters
|
||||
func (r *JobRepository) countJobs(query sq.SelectBuilder,
|
||||
filters []*model.JobFilter) (int, error) {
|
||||
|
||||
query, qerr := SecurityCheck(ctx, sq.Select("count(*)").From("job"))
|
||||
if qerr != nil {
|
||||
return 0, qerr
|
||||
}
|
||||
|
||||
for _, f := range filters {
|
||||
query = BuildWhereClause(f, query)
|
||||
}
|
||||
@ -108,42 +92,21 @@ func (r *JobRepository) countJobs(query sq.SelectBuilder,
|
||||
return count, nil
|
||||
}
|
||||
|
||||
// testFunction for countJobs
|
||||
func (r *JobRepository) testCountJobs(
|
||||
filters []*model.JobFilter) (int, error) {
|
||||
|
||||
return r.countJobs(sq.Select("count(*)").From("job"), filters)
|
||||
}
|
||||
|
||||
// Public function with added securityCheck, calls private countJobs function above
|
||||
func (r *JobRepository) CountJobs(
|
||||
ctx context.Context,
|
||||
filters []*model.JobFilter) (int, error) {
|
||||
|
||||
query, qerr := SecurityCheck(ctx, sq.Select("count(*)").From("job"))
|
||||
|
||||
if qerr != nil {
|
||||
return 0, qerr
|
||||
}
|
||||
|
||||
return r.countJobs(query, filters)
|
||||
}
|
||||
|
||||
func SecurityCheck(ctx context.Context, query sq.SelectBuilder) (sq.SelectBuilder, error) {
user := auth.GetUser(ctx)
user := GetUserFromContext(ctx)
if user == nil {
var qnil sq.SelectBuilder
return qnil, fmt.Errorf("user context is nil!")
} else if user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleApi}) { // Admin & Co. : All jobs
} else if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleApi}) { // Admin & Co. : All jobs
return query, nil
} else if user.HasRole(auth.RoleManager) { // Manager : Add filter for managed projects' jobs only + personal jobs
} else if user.HasRole(schema.RoleManager) { // Manager : Add filter for managed projects' jobs only + personal jobs
if len(user.Projects) != 0 {
return query.Where(sq.Or{sq.Eq{"job.project": user.Projects}, sq.Eq{"job.user": user.Username}}), nil
} else {
log.Debugf("Manager-User '%s' has no defined projects to lookup! Query only personal jobs ...", user.Username)
return query.Where("job.user = ?", user.Username), nil
}
} else if user.HasRole(auth.RoleUser) { // User : Only personal jobs
} else if user.HasRole(schema.RoleUser) { // User : Only personal jobs
return query.Where("job.user = ?", user.Username), nil
} else {
// Shortterm compatibility: Return User-Query if no roles:
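To illustrate the intended usage of this shared filter, here is a hypothetical repository method (an example, not part of this commit) that follows the same pattern as QueryJobs and CountJobs above: apply SecurityCheck first, then add method-specific conditions.

// countRunningJobs is an illustrative sketch only. It applies the shared
// SecurityCheck before adding its own WHERE condition, mirroring QueryJobs
// and CountJobs.
func (r *JobRepository) countRunningJobs(ctx context.Context) (int, error) {
	query, qerr := SecurityCheck(ctx, sq.Select("count(*)").From("job"))
	if qerr != nil {
		return 0, qerr
	}

	// Restrict to running jobs on top of the role-based restrictions.
	query = query.Where("job.job_state = ?", "running")

	var count int
	if err := query.RunWith(r.DB).QueryRow().Scan(&count); err != nil {
		return 0, err
	}
	return count, nil
}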
@ -5,10 +5,12 @@
package repository

import (
"context"
"testing"

"github.com/ClusterCockpit/cc-backend/internal/graph/model"
"github.com/ClusterCockpit/cc-backend/pkg/log"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
_ "github.com/mattn/go-sqlite3"
)

@ -94,7 +96,7 @@ func BenchmarkDB_CountJobs(b *testing.B) {

b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
_, err := db.testCountJobs([]*model.JobFilter{filter})
_, err := db.CountJobs(getContext(b), []*model.JobFilter{filter})
noErr(b, err)
}
})
@ -118,20 +120,37 @@ func BenchmarkDB_QueryJobs(b *testing.B) {

b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
_, err := db.testQueryJobs([]*model.JobFilter{filter}, page, order)
_, err := db.QueryJobs(getContext(b), []*model.JobFilter{filter}, page, order)
noErr(b, err)
}
})
})
}

func getContext(tb testing.TB) context.Context {
tb.Helper()

var roles []string
roles = append(roles, schema.GetRoleString(schema.RoleAdmin))
projects := make([]string, 0)

user := &schema.User{
Username: "demo",
Name: "The man",
Roles: roles,
Projects: projects,
AuthSource: schema.AuthViaLDAP,
}
ctx := context.Background()
return context.WithValue(ctx, ContextUserKey, user)
}

func setup(tb testing.TB) *JobRepository {
tb.Helper()
log.Init("warn", true)
dbfile := "testdata/job.db"
err := MigrateDB("sqlite3", dbfile)
noErr(tb, err)

Connect("sqlite3", dbfile)
return GetJobRepository()
}
@ -10,7 +10,6 @@ import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/config"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
@ -24,6 +23,17 @@ var groupBy2column = map[model.Aggregate]string{
|
||||
model.AggregateCluster: "job.cluster",
|
||||
}
|
||||
|
||||
var sortBy2column = map[model.SortByAggregate]string{
|
||||
model.SortByAggregateTotaljobs: "totalJobs",
|
||||
model.SortByAggregateTotalwalltime: "totalWalltime",
|
||||
model.SortByAggregateTotalnodes: "totalNodes",
|
||||
model.SortByAggregateTotalnodehours: "totalNodeHours",
|
||||
model.SortByAggregateTotalcores: "totalCores",
|
||||
model.SortByAggregateTotalcorehours: "totalCoreHours",
|
||||
model.SortByAggregateTotalaccs: "totalAccs",
|
||||
model.SortByAggregateTotalacchours: "totalAccHours",
|
||||
}
|
||||
|
||||
func (r *JobRepository) buildCountQuery(
|
||||
filter []*model.JobFilter,
|
||||
kind string,
|
||||
@ -61,19 +71,26 @@ func (r *JobRepository) buildStatsQuery(
|
||||
castType := r.getCastType()
|
||||
|
||||
if col != "" {
|
||||
// Scan columns: id, totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
|
||||
query = sq.Select(col, "COUNT(job.id)",
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
|
||||
// Scan columns: id, totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
|
||||
query = sq.Select(col, "COUNT(job.id) as totalJobs",
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s) as totalWalltime", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_nodes) as %s) as totalNodes", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s) as totalNodeHours", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s) as totalCores", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s) as totalCoreHours", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_acc) as %s) as totalAccs", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s) as totalAccHours", castType),
|
||||
).From("job").GroupBy(col)
|
||||
|
||||
} else {
|
||||
// Scan columns: totalJobs, totalWalltime, totalNodeHours, totalCoreHours, totalAccHours
|
||||
// Scan columns: totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours
|
||||
query = sq.Select("COUNT(job.id)",
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_nodes) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_nodes) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_hwthreads) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_hwthreads) / 3600) as %s)", castType),
|
||||
fmt.Sprintf("CAST(SUM(job.num_acc) as %s)", castType),
|
||||
fmt.Sprintf("CAST(ROUND(SUM(job.duration * job.num_acc) / 3600) as %s)", castType),
|
||||
).From("job")
|
||||
}
|
||||
@ -86,7 +103,7 @@ func (r *JobRepository) buildStatsQuery(
|
||||
}
|
||||
|
||||
func (r *JobRepository) getUserName(ctx context.Context, id string) string {
|
||||
user := auth.GetUser(ctx)
|
||||
user := GetUserFromContext(ctx)
|
||||
name, _ := r.FindColumnValue(user, id, "user", "name", "username", false)
|
||||
if name != "" {
|
||||
return name
|
||||
@ -113,16 +130,28 @@ func (r *JobRepository) getCastType() string {
|
||||
func (r *JobRepository) JobsStatsGrouped(
|
||||
ctx context.Context,
|
||||
filter []*model.JobFilter,
|
||||
page *model.PageRequest,
|
||||
sortBy *model.SortByAggregate,
|
||||
groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
|
||||
|
||||
start := time.Now()
|
||||
col := groupBy2column[*groupBy]
|
||||
query := r.buildStatsQuery(filter, col)
|
||||
|
||||
query, err := SecurityCheck(ctx, query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if sortBy != nil {
|
||||
sortBy := sortBy2column[*sortBy]
|
||||
query = query.OrderBy(fmt.Sprintf("%s DESC", sortBy))
|
||||
}
|
||||
if page != nil && page.ItemsPerPage != -1 {
|
||||
limit := uint64(page.ItemsPerPage)
|
||||
query = query.Offset((uint64(page.Page) - 1) * limit).Limit(limit)
|
||||
}
|
||||
|
||||
rows, err := query.RunWith(r.DB).Query()
|
||||
if err != nil {
|
||||
log.Warn("Error while querying DB for job statistics")
|
||||
@ -133,15 +162,36 @@ func (r *JobRepository) JobsStatsGrouped(
|
||||
|
||||
for rows.Next() {
|
||||
var id sql.NullString
|
||||
var jobs, walltime, nodeHours, coreHours, accHours sql.NullInt64
|
||||
if err := rows.Scan(&id, &jobs, &walltime, &nodeHours, &coreHours, &accHours); err != nil {
|
||||
var jobs, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64
|
||||
if err := rows.Scan(&id, &jobs, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil {
|
||||
log.Warn("Error while scanning rows")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if id.Valid {
|
||||
var totalCoreHours, totalAccHours int
|
||||
var totalJobs, totalWalltime, totalNodes, totalNodeHours, totalCores, totalCoreHours, totalAccs, totalAccHours int
|
||||
|
||||
if jobs.Valid {
|
||||
totalJobs = int(jobs.Int64)
|
||||
}
|
||||
|
||||
if walltime.Valid {
|
||||
totalWalltime = int(walltime.Int64)
|
||||
}
|
||||
|
||||
if nodes.Valid {
|
||||
totalNodes = int(nodes.Int64)
|
||||
}
|
||||
if cores.Valid {
|
||||
totalCores = int(cores.Int64)
|
||||
}
|
||||
if accs.Valid {
|
||||
totalAccs = int(accs.Int64)
|
||||
}
|
||||
|
||||
if nodeHours.Valid {
|
||||
totalNodeHours = int(nodeHours.Int64)
|
||||
}
|
||||
if coreHours.Valid {
|
||||
totalCoreHours = int(coreHours.Int64)
|
||||
}
|
||||
@ -155,9 +205,13 @@ func (r *JobRepository) JobsStatsGrouped(
|
||||
&model.JobsStatistics{
|
||||
ID: id.String,
|
||||
Name: name,
|
||||
TotalJobs: int(jobs.Int64),
|
||||
TotalWalltime: int(walltime.Int64),
|
||||
TotalJobs: totalJobs,
|
||||
TotalWalltime: totalWalltime,
|
||||
TotalNodes: totalNodes,
|
||||
TotalNodeHours: totalNodeHours,
|
||||
TotalCores: totalCores,
|
||||
TotalCoreHours: totalCoreHours,
|
||||
TotalAccs: totalAccs,
|
||||
TotalAccHours: totalAccHours})
|
||||
} else {
|
||||
stats = append(stats,
|
||||
@ -165,7 +219,11 @@ func (r *JobRepository) JobsStatsGrouped(
|
||||
ID: id.String,
|
||||
TotalJobs: int(jobs.Int64),
|
||||
TotalWalltime: int(walltime.Int64),
|
||||
TotalNodes: totalNodes,
|
||||
TotalNodeHours: totalNodeHours,
|
||||
TotalCores: totalCores,
|
||||
TotalCoreHours: totalCoreHours,
|
||||
TotalAccs: totalAccs,
|
||||
TotalAccHours: totalAccHours})
|
||||
}
|
||||
}
|
||||
@ -189,15 +247,18 @@ func (r *JobRepository) JobsStats(
|
||||
row := query.RunWith(r.DB).QueryRow()
|
||||
stats := make([]*model.JobsStatistics, 0, 1)
|
||||
|
||||
var jobs, walltime, nodeHours, coreHours, accHours sql.NullInt64
|
||||
if err := row.Scan(&jobs, &walltime, &nodeHours, &coreHours, &accHours); err != nil {
|
||||
var jobs, walltime, nodes, nodeHours, cores, coreHours, accs, accHours sql.NullInt64
|
||||
if err := row.Scan(&jobs, &walltime, &nodes, &nodeHours, &cores, &coreHours, &accs, &accHours); err != nil {
|
||||
log.Warn("Error while scanning rows")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if jobs.Valid {
|
||||
var totalCoreHours, totalAccHours int
|
||||
var totalNodeHours, totalCoreHours, totalAccHours int
|
||||
|
||||
if nodeHours.Valid {
|
||||
totalNodeHours = int(nodeHours.Int64)
|
||||
}
|
||||
if coreHours.Valid {
|
||||
totalCoreHours = int(coreHours.Int64)
|
||||
}
|
||||
@ -208,6 +269,7 @@ func (r *JobRepository) JobsStats(
|
||||
&model.JobsStatistics{
|
||||
TotalJobs: int(jobs.Int64),
|
||||
TotalWalltime: int(walltime.Int64),
|
||||
TotalNodeHours: totalNodeHours,
|
||||
TotalCoreHours: totalCoreHours,
|
||||
TotalAccHours: totalAccHours})
|
||||
}
|
||||
@ -322,7 +384,7 @@ func (r *JobRepository) AddJobCount(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
counts := make(map[string]int)
|
||||
var count int
|
||||
|
||||
for rows.Next() {
|
||||
var cnt sql.NullInt64
|
||||
@ -330,20 +392,22 @@ func (r *JobRepository) AddJobCount(
|
||||
log.Warn("Error while scanning rows")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
count = int(cnt.Int64)
|
||||
}
|
||||
|
||||
switch kind {
|
||||
case "running":
|
||||
for _, s := range stats {
|
||||
s.RunningJobs = counts[s.ID]
|
||||
s.RunningJobs = count
|
||||
}
|
||||
case "short":
|
||||
for _, s := range stats {
|
||||
s.ShortJobs = counts[s.ID]
|
||||
s.ShortJobs = count
|
||||
}
|
||||
}
|
||||
|
||||
log.Debugf("Timer JobJobCount %s", time.Since(start))
|
||||
log.Debugf("Timer AddJobCount %s", time.Since(start))
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
@ -368,6 +432,18 @@ func (r *JobRepository) AddHistograms(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stat.HistNumCores, err = r.jobsStatisticsHistogram(ctx, "job.num_hwthreads as value", filter)
|
||||
if err != nil {
|
||||
log.Warn("Error while loading job statistics histogram: num hwthreads")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stat.HistNumAccs, err = r.jobsStatisticsHistogram(ctx, "job.num_acc as value", filter)
|
||||
if err != nil {
|
||||
log.Warn("Error while loading job statistics histogram: num acc")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Debugf("Timer AddHistograms %s", time.Since(start))
|
||||
return stat, nil
|
||||
}
|
||||
|
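The two new histogram calls hand a SQL value expression ("job.num_hwthreads as value", "job.num_acc as value") to r.jobsStatisticsHistogram; the helper itself is not shown in this hunk, so the following is only a sketch of the query shape it presumably builds with squirrel (used as sq throughout the repository code in this commit):

	// hypothetical shape: group jobs by the selected value expression and count them;
	// the filters passed to AddHistograms would be appended as Where clauses
	// before running the query with RunWith(r.DB).Query().
	q := sq.Select("job.num_hwthreads as value", "COUNT(job.id) as count").
		From("job").
		GroupBy("value")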
@ -7,6 +7,8 @@ package repository
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
)
|
||||
|
||||
func TestBuildJobStatsQuery(t *testing.T) {
|
||||
@ -19,3 +21,15 @@ func TestBuildJobStatsQuery(t *testing.T) {
|
||||
fmt.Printf("SQL: %s\n", sql)
|
||||
|
||||
}
|
||||
|
||||
func TestJobStats(t *testing.T) {
	r := setup(t)

	filter := &model.JobFilter{}
	stats, err := r.JobsStats(getContext(t), []*model.JobFilter{filter})
	noErr(t, err)

	if stats[0].TotalJobs != 6 {
		t.Fatalf("Want 6, Got %d", stats[0].TotalJobs)
	}
}
|
||||
|
@ -7,7 +7,6 @@ package repository
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/archive"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
@ -68,7 +67,7 @@ func (r *JobRepository) CreateTag(tagType string, tagName string) (tagId int64,
|
||||
return res.LastInsertId()
|
||||
}
|
||||
|
||||
func (r *JobRepository) CountTags(user *auth.User) (tags []schema.Tag, counts map[string]int, err error) {
|
||||
func (r *JobRepository) CountTags(user *schema.User) (tags []schema.Tag, counts map[string]int, err error) {
|
||||
tags = make([]schema.Tag, 0, 100)
|
||||
xrows, err := r.DB.Queryx("SELECT id, tag_type, tag_name FROM tag")
|
||||
if err != nil {
|
||||
@ -88,10 +87,10 @@ func (r *JobRepository) CountTags(user *auth.User) (tags []schema.Tag, counts ma
|
||||
LeftJoin("jobtag jt ON t.id = jt.tag_id").
|
||||
GroupBy("t.tag_name")
|
||||
|
||||
if user != nil && user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport}) { // ADMIN || SUPPORT: Count all jobs
|
||||
if user != nil && user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport}) { // ADMIN || SUPPORT: Count all jobs
|
||||
log.Debug("CountTags: User Admin or Support -> Count all Jobs for Tags")
|
||||
// Unchanged: Needs to be own case still, due to UserRole/NoRole compatibility handling in else case
|
||||
} else if user != nil && user.HasRole(auth.RoleManager) { // MANAGER: Count own jobs plus project's jobs
|
||||
} else if user != nil && user.HasRole(schema.RoleManager) { // MANAGER: Count own jobs plus project's jobs
|
||||
// Build ("project1", "project2", ...) list of variable length directly in SQL string
|
||||
q = q.Where("jt.job_id IN (SELECT id FROM job WHERE job.user = ? OR job.project IN (\""+strings.Join(user.Projects, "\",\"")+"\"))", user.Username)
|
||||
} else if user != nil { // USER OR NO ROLE (Compatibility): Only count own jobs
|
||||
|
BIN internal/repository/testdata/job.db (vendored): Binary file not shown.
@ -1,137 +1,355 @@
|
||||
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/config"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/lrucache"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
sq "github.com/Masterminds/squirrel"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
var (
|
||||
userCfgRepoOnce sync.Once
|
||||
userCfgRepoInstance *UserCfgRepo
|
||||
userRepoOnce sync.Once
|
||||
userRepoInstance *UserRepository
|
||||
)
|
||||
|
||||
type UserCfgRepo struct {
|
||||
DB *sqlx.DB
|
||||
Lookup *sqlx.Stmt
|
||||
lock sync.RWMutex
|
||||
uiDefaults map[string]interface{}
|
||||
cache *lrucache.Cache
|
||||
type UserRepository struct {
|
||||
DB *sqlx.DB
|
||||
driver string
|
||||
}
|
||||
|
||||
func GetUserCfgRepo() *UserCfgRepo {
|
||||
userCfgRepoOnce.Do(func() {
|
||||
func GetUserRepository() *UserRepository {
|
||||
userRepoOnce.Do(func() {
|
||||
db := GetConnection()
|
||||
|
||||
lookupConfigStmt, err := db.DB.Preparex(`SELECT confkey, value FROM configuration WHERE configuration.username = ?`)
|
||||
if err != nil {
|
||||
log.Fatalf("db.DB.Preparex() error: %v", err)
|
||||
}
|
||||
|
||||
userCfgRepoInstance = &UserCfgRepo{
|
||||
DB: db.DB,
|
||||
Lookup: lookupConfigStmt,
|
||||
uiDefaults: config.Keys.UiDefaults,
|
||||
cache: lrucache.New(1024),
|
||||
userRepoInstance = &UserRepository{
|
||||
DB: db.DB,
|
||||
driver: db.Driver,
|
||||
}
|
||||
})
|
||||
|
||||
return userCfgRepoInstance
|
||||
return userRepoInstance
|
||||
}
|
||||
|
||||
// Return the personalised UI config for the currently authenticated
|
||||
// user or return the plain default config.
|
||||
func (uCfg *UserCfgRepo) GetUIConfig(user *auth.User) (map[string]interface{}, error) {
|
||||
if user == nil {
|
||||
uCfg.lock.RLock()
|
||||
copy := make(map[string]interface{}, len(uCfg.uiDefaults))
|
||||
for k, v := range uCfg.uiDefaults {
|
||||
copy[k] = v
|
||||
}
|
||||
uCfg.lock.RUnlock()
|
||||
return copy, nil
|
||||
}
|
||||
|
||||
data := uCfg.cache.Get(user.Username, func() (interface{}, time.Duration, int) {
|
||||
uiconfig := make(map[string]interface{}, len(uCfg.uiDefaults))
|
||||
for k, v := range uCfg.uiDefaults {
|
||||
uiconfig[k] = v
|
||||
}
|
||||
|
||||
rows, err := uCfg.Lookup.Query(user.Username)
|
||||
if err != nil {
|
||||
log.Warnf("Error while looking up user uiconfig for user '%v'", user.Username)
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
size := 0
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
var key, rawval string
|
||||
if err := rows.Scan(&key, &rawval); err != nil {
|
||||
log.Warn("Error while scanning user uiconfig values")
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
var val interface{}
|
||||
if err := json.Unmarshal([]byte(rawval), &val); err != nil {
|
||||
log.Warn("Error while unmarshaling raw user uiconfig json")
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
size += len(key)
|
||||
size += len(rawval)
|
||||
uiconfig[key] = val
|
||||
}
|
||||
|
||||
// Add global ShortRunningJobsDuration setting as plot_list_hideShortRunningJobs
|
||||
uiconfig["plot_list_hideShortRunningJobs"] = config.Keys.ShortRunningJobsDuration
|
||||
|
||||
return uiconfig, 24 * time.Hour, size
|
||||
})
|
||||
if err, ok := data.(error); ok {
|
||||
log.Error("Error in returned dataset")
|
||||
func (r *UserRepository) GetUser(username string) (*schema.User, error) {
|
||||
user := &schema.User{Username: username}
|
||||
var hashedPassword, name, rawRoles, email, rawProjects sql.NullString
|
||||
if err := sq.Select("password", "ldap", "name", "roles", "email", "projects").From("user").
|
||||
Where("user.username = ?", username).RunWith(r.DB).
|
||||
QueryRow().Scan(&hashedPassword, &user.AuthSource, &name, &rawRoles, &email, &rawProjects); err != nil {
|
||||
log.Warnf("Error while querying user '%v' from database", username)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return data.(map[string]interface{}), nil
|
||||
}
|
||||
|
||||
// If the context does not have a user, update the global ui configuration
|
||||
// without persisting it! If there is a (authenticated) user, update only his
|
||||
// configuration.
|
||||
func (uCfg *UserCfgRepo) UpdateConfig(
|
||||
key, value string,
|
||||
user *auth.User) error {
|
||||
|
||||
if user == nil {
|
||||
var val interface{}
|
||||
if err := json.Unmarshal([]byte(value), &val); err != nil {
|
||||
log.Warn("Error while unmarshaling raw user config json")
|
||||
return err
|
||||
user.Password = hashedPassword.String
|
||||
user.Name = name.String
|
||||
user.Email = email.String
|
||||
if rawRoles.Valid {
|
||||
if err := json.Unmarshal([]byte(rawRoles.String), &user.Roles); err != nil {
|
||||
log.Warn("Error while unmarshaling raw roles from DB")
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if rawProjects.Valid {
|
||||
if err := json.Unmarshal([]byte(rawProjects.String), &user.Projects); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
uCfg.lock.Lock()
|
||||
defer uCfg.lock.Unlock()
|
||||
uCfg.uiDefaults[key] = val
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := uCfg.DB.Exec(`REPLACE INTO configuration (username, confkey, value) VALUES (?, ?, ?)`, user.Username, key, value); err != nil {
|
||||
log.Warnf("Error while replacing user config in DB for user '%v'", user.Username)
|
||||
return user, nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) GetLdapUsernames() ([]string, error) {
|
||||
|
||||
var users []string
|
||||
rows, err := r.DB.Query(`SELECT username FROM user WHERE user.ldap = 1`)
|
||||
if err != nil {
|
||||
log.Warn("Error while querying usernames")
|
||||
return nil, err
|
||||
}
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var username string
|
||||
if err := rows.Scan(&username); err != nil {
|
||||
log.Warnf("Error while scanning for user '%s'", username)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
users = append(users, username)
|
||||
}
|
||||
|
||||
return users, nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) AddUser(user *schema.User) error {
|
||||
rolesJson, _ := json.Marshal(user.Roles)
|
||||
projectsJson, _ := json.Marshal(user.Projects)
|
||||
|
||||
cols := []string{"username", "roles", "projects"}
|
||||
vals := []interface{}{user.Username, string(rolesJson), string(projectsJson)}
|
||||
|
||||
if user.Name != "" {
|
||||
cols = append(cols, "name")
|
||||
vals = append(vals, user.Name)
|
||||
}
|
||||
if user.Email != "" {
|
||||
cols = append(cols, "email")
|
||||
vals = append(vals, user.Email)
|
||||
}
|
||||
if user.Password != "" {
|
||||
password, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)
|
||||
if err != nil {
|
||||
log.Error("Error while encrypting new user password")
|
||||
return err
|
||||
}
|
||||
cols = append(cols, "password")
|
||||
vals = append(vals, string(password))
|
||||
}
|
||||
if user.AuthSource != -1 {
|
||||
cols = append(cols, "ldap")
|
||||
vals = append(vals, int(user.AuthSource))
|
||||
}
|
||||
|
||||
if _, err := sq.Insert("user").Columns(cols...).Values(vals...).RunWith(r.DB).Exec(); err != nil {
|
||||
log.Errorf("Error while inserting new user '%v' into DB", user.Username)
|
||||
return err
|
||||
}
|
||||
|
||||
uCfg.cache.Del(user.Username)
|
||||
log.Infof("new user %#v created (roles: %s, auth-source: %d, projects: %s)", user.Username, rolesJson, user.AuthSource, projectsJson)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) DelUser(username string) error {
|
||||
|
||||
_, err := r.DB.Exec(`DELETE FROM user WHERE user.username = ?`, username)
|
||||
if err != nil {
|
||||
log.Errorf("Error while deleting user '%s' from DB", username)
|
||||
return err
|
||||
}
|
||||
log.Infof("deleted user '%s' from DB", username)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) ListUsers(specialsOnly bool) ([]*schema.User, error) {
|
||||
|
||||
q := sq.Select("username", "name", "email", "roles", "projects").From("user")
|
||||
if specialsOnly {
|
||||
q = q.Where("(roles != '[\"user\"]' AND roles != '[]')")
|
||||
}
|
||||
|
||||
rows, err := q.RunWith(r.DB).Query()
|
||||
if err != nil {
|
||||
log.Warn("Error while querying user list")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
users := make([]*schema.User, 0)
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
rawroles := ""
|
||||
rawprojects := ""
|
||||
user := &schema.User{}
|
||||
var name, email sql.NullString
|
||||
if err := rows.Scan(&user.Username, &name, &email, &rawroles, &rawprojects); err != nil {
|
||||
log.Warn("Error while scanning user list")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(rawroles), &user.Roles); err != nil {
|
||||
log.Warn("Error while unmarshaling raw role list")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(rawprojects), &user.Projects); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
user.Name = name.String
|
||||
user.Email = email.String
|
||||
users = append(users, user)
|
||||
}
|
||||
return users, nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) AddRole(
|
||||
ctx context.Context,
|
||||
username string,
|
||||
queryrole string) error {
|
||||
|
||||
newRole := strings.ToLower(queryrole)
|
||||
user, err := r.GetUser(username)
|
||||
if err != nil {
|
||||
log.Warnf("Could not load user '%s'", username)
|
||||
return err
|
||||
}
|
||||
|
||||
exists, valid := user.HasValidRole(newRole)
|
||||
|
||||
if !valid {
|
||||
return fmt.Errorf("supplied role is not a valid option: %v", newRole)
|
||||
}
|
||||
if exists {
|
||||
return fmt.Errorf("User %v already has role %v", username, newRole)
|
||||
}
|
||||
|
||||
roles, _ := json.Marshal(append(user.Roles, newRole))
|
||||
if _, err := sq.Update("user").Set("roles", roles).Where("user.username = ?", username).RunWith(r.DB).Exec(); err != nil {
|
||||
log.Errorf("Error while adding new role for user '%s'", user.Username)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) RemoveRole(ctx context.Context, username string, queryrole string) error {
|
||||
oldRole := strings.ToLower(queryrole)
|
||||
user, err := r.GetUser(username)
|
||||
if err != nil {
|
||||
log.Warnf("Could not load user '%s'", username)
|
||||
return err
|
||||
}
|
||||
|
||||
exists, valid := user.HasValidRole(oldRole)
|
||||
|
||||
if !valid {
|
||||
return fmt.Errorf("supplied role is not a valid option: %v", oldRole)
|
||||
}
|
||||
if !exists {
|
||||
return fmt.Errorf("Role already deleted for user '%v': %v", username, oldRole)
|
||||
}
|
||||
|
||||
if oldRole == schema.GetRoleString(schema.RoleManager) && len(user.Projects) != 0 {
|
||||
return fmt.Errorf("cannot remove role 'manager' while user %s still has assigned project(s): %v", username, user.Projects)
|
||||
}
|
||||
|
||||
var newroles []string
|
||||
for _, r := range user.Roles {
|
||||
if r != oldRole {
|
||||
newroles = append(newroles, r) // Append all roles not matching requested to be deleted role
|
||||
}
|
||||
}
|
||||
|
||||
var mroles, _ = json.Marshal(newroles)
|
||||
if _, err := sq.Update("user").Set("roles", mroles).Where("user.username = ?", username).RunWith(r.DB).Exec(); err != nil {
|
||||
log.Errorf("Error while removing role for user '%s'", user.Username)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) AddProject(
|
||||
ctx context.Context,
|
||||
username string,
|
||||
project string) error {
|
||||
|
||||
user, err := r.GetUser(username)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !user.HasRole(schema.RoleManager) {
|
||||
return fmt.Errorf("user '%s' is not a manager", username)
|
||||
}
|
||||
|
||||
if user.HasProject(project) {
|
||||
return fmt.Errorf("user '%s' already manages project '%s'", username, project)
|
||||
}
|
||||
|
||||
projects, _ := json.Marshal(append(user.Projects, project))
|
||||
if _, err := sq.Update("user").Set("projects", projects).Where("user.username = ?", username).RunWith(r.DB).Exec(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *UserRepository) RemoveProject(ctx context.Context, username string, project string) error {
|
||||
user, err := r.GetUser(username)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !user.HasRole(schema.RoleManager) {
|
||||
return fmt.Errorf("user '%s' is not a manager", username)
|
||||
}
|
||||
|
||||
if !user.HasProject(project) {
|
||||
return fmt.Errorf("user '%s': cannot remove project '%s', project is not assigned", username, project)
|
||||
}
|
||||
|
||||
var exists bool
|
||||
var newprojects []string
|
||||
for _, p := range user.Projects {
|
||||
if p != project {
|
||||
newprojects = append(newprojects, p) // Append all projects not matching requested to be deleted project
|
||||
} else {
|
||||
exists = true
|
||||
}
|
||||
}
|
||||
|
||||
if exists {
|
||||
var result interface{}
|
||||
if len(newprojects) == 0 {
|
||||
result = "[]"
|
||||
} else {
|
||||
result, _ = json.Marshal(newprojects)
|
||||
}
|
||||
if _, err := sq.Update("user").Set("projects", result).Where("user.username = ?", username).RunWith(r.DB).Exec(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
} else {
|
||||
return fmt.Errorf("user %s already does not manage project %s", username, project)
|
||||
}
|
||||
}
|
||||
|
||||
type ContextKey string
|
||||
|
||||
const ContextUserKey ContextKey = "user"
|
||||
|
||||
func GetUserFromContext(ctx context.Context) *schema.User {
|
||||
x := ctx.Value(ContextUserKey)
|
||||
if x == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return x.(*schema.User)
|
||||
}
|
||||
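GetUserFromContext and ContextUserKey replace the auth.GetUser lookups used further down in routerConfig; a minimal sketch of both sides, assuming a middleware that resolves the user (the middleware itself is not part of this patch):

	// in an auth middleware: stash the resolved user in the request context
	ctx := context.WithValue(r.Context(), repository.ContextUserKey, user)
	next.ServeHTTP(rw, r.WithContext(ctx))

	// later, in a handler or resolver: retrieve it (nil if no user was set)
	if u := repository.GetUserFromContext(r.Context()); u != nil {
		log.Debugf("request issued by %s", u.Username)
	}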
|
||||
func (r *UserRepository) FetchUserInCtx(ctx context.Context, username string) (*model.User, error) {
|
||||
me := GetUserFromContext(ctx)
|
||||
if me != nil && me.Username != username &&
|
||||
me.HasNotRoles([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
return nil, errors.New("forbidden")
|
||||
}
|
||||
|
||||
user := &model.User{Username: username}
|
||||
var name, email sql.NullString
|
||||
if err := sq.Select("name", "email").From("user").Where("user.username = ?", username).
|
||||
RunWith(r.DB).QueryRow().Scan(&name, &email); err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
/* This warning will be logged *often* for non-local users, i.e. users mentioned only in job-table or archive, */
|
||||
/* since FetchUser will be called to retrieve full name and mail for every job in query/list */
|
||||
// log.Warnf("User '%s' Not found in DB", username)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
log.Warnf("Error while fetching user '%s'", username)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
user.Name = name.String
|
||||
user.Email = email.String
|
||||
return user, nil
|
||||
}
|
||||
|
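The user handling that routerConfig previously reached through the auth package is now behind UserRepository; a hedged sketch of creating a local user with it (all values are examples, not taken from this patch):

	ur := repository.GetUserRepository()
	err := ur.AddUser(&schema.User{
		Username:   "jdoe",                      // example value
		Name:       "Jane Doe",                  // example value
		Roles:      []string{"user"},            // stored as a JSON array in the roles column
		Projects:   []string{},
		AuthSource: schema.AuthViaLocalPassword, // written to the ldap column as an int
		Password:   "changeme",                  // bcrypt-hashed by AddUser before insert
	})
	if err != nil {
		log.Errorf("could not create user: %v", err)
	}

Note that AddUser only appends the optional columns (name, email, password, ldap) when the corresponding field is set.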
137 internal/repository/userConfig.go (new file)
@ -0,0 +1,137 @@
|
||||
// Copyright (C) 2022 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
package repository
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/config"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/lrucache"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
var (
|
||||
userCfgRepoOnce sync.Once
|
||||
userCfgRepoInstance *UserCfgRepo
|
||||
)
|
||||
|
||||
type UserCfgRepo struct {
|
||||
DB *sqlx.DB
|
||||
Lookup *sqlx.Stmt
|
||||
lock sync.RWMutex
|
||||
uiDefaults map[string]interface{}
|
||||
cache *lrucache.Cache
|
||||
}
|
||||
|
||||
func GetUserCfgRepo() *UserCfgRepo {
|
||||
userCfgRepoOnce.Do(func() {
|
||||
db := GetConnection()
|
||||
|
||||
lookupConfigStmt, err := db.DB.Preparex(`SELECT confkey, value FROM configuration WHERE configuration.username = ?`)
|
||||
if err != nil {
|
||||
log.Fatalf("db.DB.Preparex() error: %v", err)
|
||||
}
|
||||
|
||||
userCfgRepoInstance = &UserCfgRepo{
|
||||
DB: db.DB,
|
||||
Lookup: lookupConfigStmt,
|
||||
uiDefaults: config.Keys.UiDefaults,
|
||||
cache: lrucache.New(1024),
|
||||
}
|
||||
})
|
||||
|
||||
return userCfgRepoInstance
|
||||
}
|
||||
|
||||
// Return the personalised UI config for the currently authenticated
|
||||
// user or return the plain default config.
|
||||
func (uCfg *UserCfgRepo) GetUIConfig(user *schema.User) (map[string]interface{}, error) {
|
||||
if user == nil {
|
||||
uCfg.lock.RLock()
|
||||
copy := make(map[string]interface{}, len(uCfg.uiDefaults))
|
||||
for k, v := range uCfg.uiDefaults {
|
||||
copy[k] = v
|
||||
}
|
||||
uCfg.lock.RUnlock()
|
||||
return copy, nil
|
||||
}
|
||||
|
||||
data := uCfg.cache.Get(user.Username, func() (interface{}, time.Duration, int) {
|
||||
uiconfig := make(map[string]interface{}, len(uCfg.uiDefaults))
|
||||
for k, v := range uCfg.uiDefaults {
|
||||
uiconfig[k] = v
|
||||
}
|
||||
|
||||
rows, err := uCfg.Lookup.Query(user.Username)
|
||||
if err != nil {
|
||||
log.Warnf("Error while looking up user uiconfig for user '%v'", user.Username)
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
size := 0
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
var key, rawval string
|
||||
if err := rows.Scan(&key, &rawval); err != nil {
|
||||
log.Warn("Error while scanning user uiconfig values")
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
var val interface{}
|
||||
if err := json.Unmarshal([]byte(rawval), &val); err != nil {
|
||||
log.Warn("Error while unmarshaling raw user uiconfig json")
|
||||
return err, 0, 0
|
||||
}
|
||||
|
||||
size += len(key)
|
||||
size += len(rawval)
|
||||
uiconfig[key] = val
|
||||
}
|
||||
|
||||
// Add global ShortRunningJobsDuration setting as plot_list_hideShortRunningJobs
|
||||
uiconfig["plot_list_hideShortRunningJobs"] = config.Keys.ShortRunningJobsDuration
|
||||
|
||||
return uiconfig, 24 * time.Hour, size
|
||||
})
|
||||
if err, ok := data.(error); ok {
|
||||
log.Error("Error in returned dataset")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return data.(map[string]interface{}), nil
|
||||
}
|
||||
|
||||
// If the context does not have a user, update the global ui configuration
|
||||
// without persisting it! If there is a (authenticated) user, update only his
|
||||
// configuration.
|
||||
func (uCfg *UserCfgRepo) UpdateConfig(
|
||||
key, value string,
|
||||
user *schema.User) error {
|
||||
|
||||
if user == nil {
|
||||
var val interface{}
|
||||
if err := json.Unmarshal([]byte(value), &val); err != nil {
|
||||
log.Warn("Error while unmarshaling raw user config json")
|
||||
return err
|
||||
}
|
||||
|
||||
uCfg.lock.Lock()
|
||||
defer uCfg.lock.Unlock()
|
||||
uCfg.uiDefaults[key] = val
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := uCfg.DB.Exec(`REPLACE INTO configuration (username, confkey, value) VALUES (?, ?, ?)`, user.Username, key, value); err != nil {
|
||||
log.Warnf("Error while replacing user config in DB for user '%v'", user.Username)
|
||||
return err
|
||||
}
|
||||
|
||||
uCfg.cache.Del(user.Username)
|
||||
return nil
|
||||
}
|
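GetUIConfig and UpdateConfig keep their previous behaviour and only switch the user type to schema.User; a short usage sketch (key and value are examples, not taken from this patch):

	uCfgRepo := repository.GetUserCfgRepo()

	// nil user: a copy of the global ui-defaults is returned
	defaults, _ := uCfgRepo.GetUIConfig(nil)
	log.Debugf("loaded %d default ui settings", len(defaults))

	// authenticated user: personalised config, cached for 24h; UpdateConfig drops the cache entry
	user := &schema.User{Username: "jdoe"} // example value
	if err := uCfgRepo.UpdateConfig("plot_list_jobsPerPage", "25", user); err != nil { // example key/value
		log.Warnf("could not update config: %v", err)
	}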
@ -9,9 +9,9 @@ import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/config"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
@ -22,6 +22,9 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
|
||||
"kind": "file",
|
||||
"path": "./var/job-archive"
|
||||
},
|
||||
"jwts": {
|
||||
"max-age": "2m"
|
||||
},
|
||||
"clusters": [
|
||||
{
|
||||
"name": "testcluster",
|
||||
@ -53,7 +56,7 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
|
||||
|
||||
func TestGetUIConfig(t *testing.T) {
|
||||
r := setupUserTest(t)
|
||||
u := auth.User{Username: "demo"}
|
||||
u := schema.User{Username: "demo"}
|
||||
|
||||
cfg, err := r.GetUIConfig(&u)
|
||||
if err != nil {
|
@ -13,11 +13,11 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ClusterCockpit/cc-backend/internal/auth"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/graph/model"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/repository"
|
||||
"github.com/ClusterCockpit/cc-backend/internal/util"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/log"
|
||||
"github.com/ClusterCockpit/cc-backend/pkg/schema"
|
||||
"github.com/ClusterCockpit/cc-backend/web"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@ -81,12 +81,11 @@ func setupJobRoute(i InfoType, r *http.Request) InfoType {
|
||||
}
|
||||
|
||||
func setupUserRoute(i InfoType, r *http.Request) InfoType {
|
||||
jobRepo := repository.GetJobRepository()
|
||||
username := mux.Vars(r)["id"]
|
||||
i["id"] = username
|
||||
i["username"] = username
|
||||
// TODO: If forbidden (== err exists), redirect to error page
|
||||
if user, _ := auth.FetchUser(r.Context(), jobRepo.DB, username); user != nil {
|
||||
if user, _ := repository.GetUserRepository().FetchUserInCtx(r.Context(), username); user != nil {
|
||||
i["name"] = user.Name
|
||||
i["email"] = user.Email
|
||||
}
|
||||
@ -125,7 +124,7 @@ func setupAnalysisRoute(i InfoType, r *http.Request) InfoType {
|
||||
|
||||
func setupTaglistRoute(i InfoType, r *http.Request) InfoType {
|
||||
jobRepo := repository.GetJobRepository()
|
||||
user := auth.GetUser(r.Context())
|
||||
user := repository.GetUserFromContext(r.Context())
|
||||
|
||||
tags, counts, err := jobRepo.CountTags(user)
|
||||
tagMap := make(map[string][]map[string]interface{})
|
||||
@ -255,7 +254,7 @@ func SetupRoutes(router *mux.Router, buildInfo web.Build) {
|
||||
for _, route := range routes {
|
||||
route := route
|
||||
router.HandleFunc(route.Route, func(rw http.ResponseWriter, r *http.Request) {
|
||||
conf, err := userCfgRepo.GetUIConfig(auth.GetUser(r.Context()))
|
||||
conf, err := userCfgRepo.GetUIConfig(repository.GetUserFromContext(r.Context()))
|
||||
if err != nil {
|
||||
http.Error(rw, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@ -268,9 +267,9 @@ func SetupRoutes(router *mux.Router, buildInfo web.Build) {
|
||||
}
|
||||
|
||||
// Get User -> What if NIL?
|
||||
user := auth.GetUser(r.Context())
|
||||
user := repository.GetUserFromContext(r.Context())
|
||||
// Get Roles
|
||||
availableRoles, _ := auth.GetValidRolesMap(user)
|
||||
availableRoles, _ := schema.GetValidRolesMap(user)
|
||||
|
||||
page := web.Page{
|
||||
Title: title,
|
||||
@ -285,14 +284,14 @@ func SetupRoutes(router *mux.Router, buildInfo web.Build) {
|
||||
page.FilterPresets = buildFilterPresets(r.URL.Query())
|
||||
}
|
||||
|
||||
web.RenderTemplate(rw, r, route.Template, &page)
|
||||
web.RenderTemplate(rw, route.Template, &page)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func HandleSearchBar(rw http.ResponseWriter, r *http.Request, buildInfo web.Build) {
|
||||
user := auth.GetUser(r.Context())
|
||||
availableRoles, _ := auth.GetValidRolesMap(user)
|
||||
user := repository.GetUserFromContext(r.Context())
|
||||
availableRoles, _ := schema.GetValidRolesMap(user)
|
||||
|
||||
if search := r.URL.Query().Get("searchId"); search != "" {
|
||||
repo := repository.GetJobRepository()
|
||||
@ -309,10 +308,10 @@ func HandleSearchBar(rw http.ResponseWriter, r *http.Request, buildInfo web.Buil
|
||||
case "arrayJobId":
|
||||
http.Redirect(rw, r, "/monitoring/jobs/?arrayJobId="+url.QueryEscape(strings.Trim(splitSearch[1], " ")), http.StatusFound) // All Users: Redirect to Tablequery
|
||||
case "username":
|
||||
if user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleManager}) {
|
||||
if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
http.Redirect(rw, r, "/monitoring/users/?user="+url.QueryEscape(strings.Trim(splitSearch[1], " ")), http.StatusFound)
|
||||
} else {
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Missing Access Rights", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Missing Access Rights", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
case "name":
|
||||
usernames, _ := repo.FindColumnValues(user, strings.Trim(splitSearch[1], " "), "user", "username", "name")
|
||||
@ -320,14 +319,14 @@ func HandleSearchBar(rw http.ResponseWriter, r *http.Request, buildInfo web.Buil
|
||||
joinedNames := strings.Join(usernames, "&user=")
|
||||
http.Redirect(rw, r, "/monitoring/users/?user="+joinedNames, http.StatusFound)
|
||||
} else {
|
||||
if user.HasAnyRole([]auth.Role{auth.RoleAdmin, auth.RoleSupport, auth.RoleManager}) {
|
||||
if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleManager}) {
|
||||
http.Redirect(rw, r, "/monitoring/users/?user=NoUserNameFound", http.StatusPermanentRedirect)
|
||||
} else {
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Missing Access Rights", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Missing Access Rights", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
}
|
||||
default:
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Warning", MsgType: "alert-warning", Message: fmt.Sprintf("Unknown search type: %s", strings.Trim(splitSearch[0], " ")), User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Warning", MsgType: "alert-warning", Message: fmt.Sprintf("Unknown search type: %s", strings.Trim(splitSearch[0], " ")), User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
} else if len(splitSearch) == 1 {
|
||||
|
||||
@ -342,13 +341,13 @@ func HandleSearchBar(rw http.ResponseWriter, r *http.Request, buildInfo web.Buil
|
||||
} else if jobname != "" {
|
||||
http.Redirect(rw, r, "/monitoring/jobs/?jobName="+url.QueryEscape(jobname), http.StatusFound) // JobName (contains)
|
||||
} else {
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Info", MsgType: "alert-info", Message: "Search without result", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Info", MsgType: "alert-info", Message: "Search without result", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
|
||||
} else {
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Searchbar query parameters malformed", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Error", MsgType: "alert-danger", Message: "Searchbar query parameters malformed", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
} else {
|
||||
web.RenderTemplate(rw, r, "message.tmpl", &web.Page{Title: "Warning", MsgType: "alert-warning", Message: "Empty search", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
web.RenderTemplate(rw, "message.tmpl", &web.Page{Title: "Warning", MsgType: "alert-warning", Message: "Empty search", User: *user, Roles: availableRoles, Build: buildInfo})
|
||||
}
|
||||
}
|
||||
|
14 internal/util/array.go (new file)
@ -0,0 +1,14 @@
|
||||
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
// All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package util

func Contains[T comparable](items []T, item T) bool {
	for _, v := range items {
		if v == item {
			return true
		}
	}
	return false
}
|
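A usage sketch for the new generic helper; the call site and the kind variable are hypothetical, not part of this patch:

	import "github.com/ClusterCockpit/cc-backend/internal/util"

	validKinds := []string{"running", "short"}
	if !util.Contains(validKinds, kind) {
		return fmt.Errorf("unknown job count kind: %s", kind)
	}

Because Contains is generic over any comparable type, the same call works for []int, []int64 or role slices without further helpers.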
@ -15,24 +15,31 @@ type LdapConfig struct {
|
||||
SearchDN string `json:"search_dn"`
|
||||
UserBind string `json:"user_bind"`
|
||||
UserFilter string `json:"user_filter"`
|
||||
UserAttr string `json:"username_attr"`
|
||||
SyncInterval string `json:"sync_interval"` // Parsed using time.ParseDuration.
|
||||
SyncDelOldUsers bool `json:"sync_del_old_users"`
|
||||
|
||||
// Should a non-existent user be added to the DB if the user exists in the LDAP directory
|
||||
SyncUserOnLogin bool `json:"syncUserOnLogin"`
|
||||
}
|
||||
|
||||
type JWTAuthConfig struct {
|
||||
// Specifies for how long a session or JWT shall be valid
|
||||
// Specifies for how long a JWT shall be valid
|
||||
// as a string parsable by time.ParseDuration().
|
||||
MaxAge int64 `json:"max-age"`
|
||||
MaxAge string `json:"max-age"`
|
||||
|
||||
// Specifies which cookie should be checked for a JWT token (if no authorization header is present)
|
||||
CookieName string `json:"cookieName"`
|
||||
|
||||
// Deny login for users not in database (but defined in JWT).
|
||||
// Ignore user roles defined in JWTs ('roles' claim), get them from db.
|
||||
ForceJWTValidationViaDatabase bool `json:"forceJWTValidationViaDatabase"`
|
||||
ValidateUser bool `json:"validateUser"`
|
||||
|
||||
// Specifies which issuer should be accepted when validating external JWTs ('iss' claim)
|
||||
TrustedExternalIssuer string `json:"trustedExternalIssuer"`
|
||||
TrustedIssuer string `json:"trustedIssuer"`
|
||||
|
||||
// Should an non-existent user be added to the DB based on the information in the token
|
||||
SyncUserOnLogin bool `json:"syncUserOnLogin"`
|
||||
}
|
||||
|
||||
type IntRange struct {
|
||||
@ -69,6 +76,9 @@ type ProgramConfig struct {
|
||||
// Address where the http (or https) server will listen on (for example: 'localhost:80').
|
||||
Addr string `json:"addr"`
|
||||
|
||||
// Addresses from which the /api/secured/* API endpoints can be reached
|
||||
ApiAllowedIPs []string `json:"apiAllowedIPs"`
|
||||
|
||||
// Drop root permissions once .env was read and the port was taken.
|
||||
User string `json:"user"`
|
||||
Group string `json:"group"`
|
||||
@ -102,7 +112,7 @@ type ProgramConfig struct {
|
||||
LdapConfig *LdapConfig `json:"ldap"`
|
||||
JwtConfig *JWTAuthConfig `json:"jwts"`
|
||||
|
||||
// If 0 or empty, the session/token does not expire!
|
||||
// If 0 or empty, the session does not expire!
|
||||
SessionMaxAge string `json:"session-max-age"`
|
||||
|
||||
// If both those options are not empty, use HTTPS using those certificates.
|
||||
@ -113,7 +123,7 @@ type ProgramConfig struct {
|
||||
// redirect every request incoming at port 80 to that url.
|
||||
RedirectHttpTo string `json:"redirect-http-to"`
|
||||
|
||||
// If overwriten, at least all the options in the defaults below must
|
||||
// If overwritten, at least all the options in the defaults below must
|
||||
// be provided! Most options here can be overwritten by the user.
|
||||
UiDefaults map[string]interface{} `json:"ui-defaults"`
|
||||
|
||||
|
@ -107,10 +107,6 @@
|
||||
"description": "Specifies for how long a session shall be valid as a string parsable by time.ParseDuration(). If 0 or empty, the session/token does not expire!",
|
||||
"type": "string"
|
||||
},
|
||||
"jwt-max-age": {
|
||||
"description": "Specifies for how long a JWT token shall be valid as a string parsable by time.ParseDuration(). If 0 or empty, the session/token does not expire!",
|
||||
"type": "string"
|
||||
},
|
||||
"https-cert-file": {
|
||||
"description": "Filepath to SSL certificate. If also https-key-file is set use HTTPS using those certificates.",
|
||||
"type": "string"
|
||||
@ -131,9 +127,34 @@
|
||||
"description": "Do not show running jobs shorter than X seconds.",
|
||||
"type": "integer"
|
||||
},
|
||||
"": {
|
||||
"description": "",
|
||||
"type": "string"
|
||||
"jwts": {
|
||||
"description": "For JWT token authentication.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"max-age": {
|
||||
"description": "Configure how long a token is valid. As string parsable by time.ParseDuration()",
|
||||
"type": "string"
|
||||
},
|
||||
"cookieName": {
|
||||
"description": "Cookie that should be checked for a JWT token.",
|
||||
"type": "string"
|
||||
},
|
||||
"validateUser": {
|
||||
"description": "Deny login for users not in database (but defined in JWT). Overwrite roles in JWT with database roles.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"trustedIssuer": {
|
||||
"description": "Issuer that should be accepted when validating external JWTs ",
|
||||
"type": "string"
|
||||
},
|
||||
"syncUserOnLogin": {
|
||||
"description": "Add non-existent user to DB at login attempt with values provided in JWT.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"max-age"
|
||||
]
|
||||
},
|
||||
"ldap": {
|
||||
"description": "For LDAP Authentication and user synchronisation.",
|
||||
@ -159,6 +180,10 @@
|
||||
"description": "Filter to extract users for syncing.",
|
||||
"type": "string"
|
||||
},
|
||||
"username_attr": {
|
||||
"description": "Attribute with full username. Default: gecos",
|
||||
"type": "string"
|
||||
},
|
||||
"sync_interval": {
|
||||
"description": "Interval used for syncing local user table with LDAP directory. Parsed using time.ParseDuration.",
|
||||
"type": "string"
|
||||
@ -166,6 +191,10 @@
|
||||
"sync_del_old_users": {
|
||||
"description": "Delete obsolete users in database.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"syncUserOnLogin": {
|
||||
"description": "Add non-existent user to DB at login attempt if user exists in Ldap directory",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@ -398,6 +427,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"jwts",
|
||||
"clusters"
|
||||
]
|
||||
}
|
||||
|
201 pkg/schema/user.go (new file)
@ -0,0 +1,201 @@
|
||||
// Copyright (C) 2023 NHR@FAU, University Erlangen-Nuremberg.
|
||||
// All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
package schema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Role int
|
||||
|
||||
const (
|
||||
RoleAnonymous Role = iota
|
||||
RoleApi
|
||||
RoleUser
|
||||
RoleManager
|
||||
RoleSupport
|
||||
RoleAdmin
|
||||
RoleError
|
||||
)
|
||||
|
||||
type AuthSource int
|
||||
|
||||
const (
|
||||
AuthViaLocalPassword AuthSource = iota
|
||||
AuthViaLDAP
|
||||
AuthViaToken
|
||||
AuthViaAll
|
||||
)
|
||||
|
||||
type AuthType int
|
||||
|
||||
const (
|
||||
AuthToken AuthType = iota
|
||||
AuthSession
|
||||
)
|
||||
|
||||
type User struct {
|
||||
Username string `json:"username"`
|
||||
Password string `json:"-"`
|
||||
Name string `json:"name"`
|
||||
Roles []string `json:"roles"`
|
||||
AuthType AuthType `json:"authType"`
|
||||
AuthSource AuthSource `json:"authSource"`
|
||||
Email string `json:"email"`
|
||||
Projects []string `json:"projects"`
|
||||
}
|
||||
|
||||
func (u *User) HasProject(project string) bool {
|
||||
for _, p := range u.Projects {
|
||||
if p == project {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func GetRoleString(roleInt Role) string {
|
||||
return [6]string{"anonymous", "api", "user", "manager", "support", "admin"}[roleInt]
|
||||
}
|
||||
|
||||
func getRoleEnum(roleStr string) Role {
|
||||
switch strings.ToLower(roleStr) {
|
||||
case "admin":
|
||||
return RoleAdmin
|
||||
case "support":
|
||||
return RoleSupport
|
||||
case "manager":
|
||||
return RoleManager
|
||||
case "user":
|
||||
return RoleUser
|
||||
case "api":
|
||||
return RoleApi
|
||||
case "anonymous":
|
||||
return RoleAnonymous
|
||||
default:
|
||||
return RoleError
|
||||
}
|
||||
}
|
||||
|
||||
func IsValidRole(role string) bool {
|
||||
return getRoleEnum(role) != RoleError
|
||||
}
|
||||
|
||||
func (u *User) HasValidRole(role string) (hasRole bool, isValid bool) {
|
||||
if IsValidRole(role) {
|
||||
for _, r := range u.Roles {
|
||||
if r == role {
|
||||
return true, true
|
||||
}
|
||||
}
|
||||
return false, true
|
||||
}
|
||||
return false, false
|
||||
}
|
||||
|
||||
func (u *User) HasRole(role Role) bool {
|
||||
for _, r := range u.Roles {
|
||||
if r == GetRoleString(role) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Role-Arrays are short: performance not impacted by nested loop
|
||||
func (u *User) HasAnyRole(queryroles []Role) bool {
|
||||
for _, ur := range u.Roles {
|
||||
for _, qr := range queryroles {
|
||||
if ur == GetRoleString(qr) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Role-Arrays are short: performance not impacted by nested loop
|
||||
func (u *User) HasAllRoles(queryroles []Role) bool {
|
||||
target := len(queryroles)
|
||||
matches := 0
|
||||
for _, ur := range u.Roles {
|
||||
for _, qr := range queryroles {
|
||||
if ur == GetRoleString(qr) {
|
||||
matches += 1
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return matches == target
|
||||
}
|
||||
|
||||
// Role-Arrays are short: performance not impacted by nested loop
|
||||
func (u *User) HasNotRoles(queryroles []Role) bool {
|
||||
matches := 0
|
||||
for _, ur := range u.Roles {
|
||||
for _, qr := range queryroles {
|
||||
if ur == GetRoleString(qr) {
|
||||
matches += 1
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return matches == 0
|
||||
}
|
||||
|
||||
// Called by API endpoint '/roles/' from frontend: Only required for admin config -> Check Admin Role
|
||||
func GetValidRoles(user *User) ([]string, error) {
|
||||
var vals []string
|
||||
if user.HasRole(RoleAdmin) {
|
||||
for i := RoleApi; i < RoleError; i++ {
|
||||
vals = append(vals, GetRoleString(i))
|
||||
}
|
||||
return vals, nil
|
||||
}
|
||||
|
||||
return vals, fmt.Errorf("%s: only admins are allowed to fetch a list of roles", user.Username)
|
||||
}
|
||||
|
||||
// Called by routerConfig web.page setup in backend: Only requires known user
|
||||
func GetValidRolesMap(user *User) (map[string]Role, error) {
|
||||
named := make(map[string]Role)
|
||||
if user.HasNotRoles([]Role{RoleAnonymous}) {
|
||||
for i := RoleApi; i < RoleError; i++ {
|
||||
named[GetRoleString(i)] = i
|
||||
}
|
||||
return named, nil
|
||||
}
|
||||
return named, fmt.Errorf("only known users are allowed to fetch a list of roles")
|
||||
}
|
||||
|
||||
// Find highest role
|
||||
func (u *User) GetAuthLevel() Role {
|
||||
if u.HasRole(RoleAdmin) {
|
||||
return RoleAdmin
|
||||
} else if u.HasRole(RoleSupport) {
|
||||
return RoleSupport
|
||||
} else if u.HasRole(RoleManager) {
|
||||
return RoleManager
|
||||
} else if u.HasRole(RoleUser) {
|
||||
return RoleUser
|
||||
} else if u.HasRole(RoleApi) {
|
||||
return RoleApi
|
||||
} else if u.HasRole(RoleAnonymous) {
|
||||
return RoleAnonymous
|
||||
} else {
|
||||
return RoleError
|
||||
}
|
||||
}
|
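A short example of the relocated role helpers (user and roles are example values):

	u := &schema.User{
		Username: "jdoe", // example value
		Roles:    []string{"user", "manager"},
	}

	fmt.Println(u.HasRole(schema.RoleManager))                                      // true
	fmt.Println(u.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport})) // false
	fmt.Println(u.HasNotRoles([]schema.Role{schema.RoleAnonymous}))                // true
	fmt.Println(u.GetAuthLevel() == schema.RoleManager)                            // true: highest assigned role

GetValidRolesMap only refuses users that carry the anonymous role, so the same u can be used where routerConfig builds the availableRoles map for the web pages.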
@ -2,7 +2,7 @@
|
||||
// All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
package auth
|
||||
package schema
|
||||
|
||||
import (
|
||||
"testing"
|
@ -11,6 +11,9 @@ import (
|
||||
|
||||
func TestValidateConfig(t *testing.T) {
|
||||
json := []byte(`{
|
||||
"jwts": {
|
||||
"max-age": "2m"
|
||||
},
|
||||
"clusters": [
|
||||
{
|
||||
"name": "testcluster",
|
||||
@ -21,9 +24,7 @@ func TestValidateConfig(t *testing.T) {
|
||||
"numNodes": { "from": 1, "to": 64 },
|
||||
"duration": { "from": 0, "to": 86400 },
|
||||
"startTime": { "from": "2022-01-01T00:00:00Z", "to": null }
|
||||
}
|
||||
}
|
||||
]
|
||||
}}]
|
||||
}`)
|
||||
|
||||
if err := Validate(Config, bytes.NewReader(json)); err != nil {
|
||||
|
231 web/frontend/package-lock.json (generated)
@ -11,8 +11,10 @@
|
||||
"dependencies": {
|
||||
"@rollup/plugin-replace": "^5.0.2",
|
||||
"@urql/svelte": "^4.0.1",
|
||||
"chart.js": "^4.3.3",
|
||||
"graphql": "^16.6.0",
|
||||
"sveltestrap": "^5.10.0",
|
||||
"svelte-chartjs": "^3.1.2",
|
||||
"sveltestrap": "^5.11.1",
|
||||
"uplot": "^1.6.24",
|
||||
"wonka": "^6.3.2"
|
||||
},
|
||||
@ -27,9 +29,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@0no-co/graphql.web": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@0no-co/graphql.web/-/graphql.web-1.0.1.tgz",
|
||||
"integrity": "sha512-6Yaxyv6rOwRkLIvFaL0NrLDgfNqC/Ng9QOPmTmlqW4mORXMEKmh5NYGkIvvt5Yw8fZesnMAqkj8cIqTj8f40cQ==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@0no-co/graphql.web/-/graphql.web-1.0.4.tgz",
|
||||
"integrity": "sha512-W3ezhHGfO0MS1PtGloaTpg0PbaT8aZSmmaerL7idtU5F7oCI+uu25k+MsMS31BVFlp4aMkHSrNRxiD72IlK8TA==",
|
||||
"peerDependencies": {
|
||||
"graphql": "^14.0.0 || ^15.0.0 || ^16.0.0"
|
||||
},
|
||||
@ -40,9 +42,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/gen-mapping": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
|
||||
"integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
|
||||
"version": "0.3.3",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz",
|
||||
"integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/set-array": "^1.0.1",
|
||||
@ -54,9 +56,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/resolve-uri": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
|
||||
"integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz",
|
||||
"integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
@ -72,9 +74,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/source-map": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
|
||||
"integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
|
||||
"version": "0.3.5",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz",
|
||||
"integrity": "sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/gen-mapping": "^0.3.0",
|
||||
@ -82,24 +84,29 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/sourcemap-codec": {
|
||||
"version": "1.4.14",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
|
||||
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw=="
|
||||
"version": "1.4.15",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
|
||||
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg=="
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.14",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz",
|
||||
"integrity": "sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==",
|
||||
"version": "0.3.19",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz",
|
||||
"integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.0.3",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
"@jridgewell/resolve-uri": "^3.1.0",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@kurkle/color": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.2.tgz",
|
||||
"integrity": "sha512-fuscdXJ9G1qb7W8VdHi+IwRqij3lBkosAm4ydQtEmbY58OzHXqQhvlxqEkoz0yssNVn38bcpRWgA9PP+OGoisw=="
|
||||
},
|
||||
"node_modules/@popperjs/core": {
|
||||
"version": "2.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.0.tgz",
|
||||
"integrity": "sha512-zrsUxjLOKAzdewIDRWy9nsV1GQsKBCWaGwsZQlCgr6/q+vjyZhFgqedLfFBuI9anTPEUT4APq9Mu0SZBTzIcGQ==",
|
||||
"version": "2.11.8",
|
||||
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz",
|
||||
"integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==",
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/popperjs"
|
||||
@ -131,9 +138,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/plugin-node-resolve": {
|
||||
"version": "15.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.0.2.tgz",
|
||||
"integrity": "sha512-Y35fRGUjC3FaurG722uhUuG8YHOJRJQbI6/CkbRkdPotSpDj9NtIN85z1zrcyDcCQIW4qp5mgG72U+gJ0TAFEg==",
|
||||
"version": "15.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.1.tgz",
|
||||
"integrity": "sha512-nsbUg588+GDSu8/NS8T4UAshO6xeaOfINNuXeVHcKV02LJtoRaM1SiOacClw4kws1SFiNhdLGxlbMY9ga/zs/w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@rollup/pluginutils": "^5.0.1",
|
||||
@ -176,14 +183,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/plugin-terser": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.4.1.tgz",
|
||||
"integrity": "sha512-aKS32sw5a7hy+fEXVy+5T95aDIwjpGHCTv833HXVtyKMDoVS7pBr5K3L9hEQoNqbJFjfANPrNpIXlTQ7is00eA==",
|
||||
"version": "0.4.3",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.4.3.tgz",
|
||||
"integrity": "sha512-EF0oejTMtkyhrkwCdg0HJ0IpkcaVg1MMSf2olHb2Jp+1mnLM04OhjpJWGma4HobiDTF0WCyViWuvadyE9ch2XA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"serialize-javascript": "^6.0.0",
|
||||
"smob": "^0.0.6",
|
||||
"terser": "^5.15.1"
|
||||
"serialize-javascript": "^6.0.1",
|
||||
"smob": "^1.0.0",
|
||||
"terser": "^5.17.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
@ -197,19 +204,10 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/plugin-terser/node_modules/serialize-javascript": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz",
|
||||
"integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"randombytes": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/pluginutils": {
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.2.tgz",
|
||||
"integrity": "sha512-pTd9rIsP92h+B6wWwFbW8RkZv4hiR/xKsqre4SIuAOaOEQRxi0lqLke9k2/7WegC85GgUs9pjmOjCUi3In4vwA==",
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.3.tgz",
|
||||
"integrity": "sha512-hfllNN4a80rwNQ9QCxhxuHCGHMAvabXqxNdaChUSSadMre7t4iEUI6fFAhBOn/eIYTgYVhBv7vCLsAJ4u3lf3g==",
|
||||
"dependencies": {
|
||||
"@types/estree": "^1.0.0",
|
||||
"estree-walker": "^2.0.2",
|
||||
@ -239,30 +237,30 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@urql/core": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/@urql/core/-/core-4.0.7.tgz",
|
||||
"integrity": "sha512-UtZ9oSbSFODXzFydgLCXpAQz26KGT1d6uEfcylKphiRWNXSWZi8k7vhJXNceNm/Dn0MiZ+kaaJHKcnGY1jvHRQ==",
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@urql/core/-/core-4.1.1.tgz",
|
||||
"integrity": "sha512-iIoAy6BY+BUZZ7KIpnMT7C9q+ULf5ZCVxGe3/i7WZSJBrQa2h1QkIMhL+8fAKmOn9gt83jSIv5drWWnhZ9izEA==",
|
||||
"dependencies": {
|
||||
"@0no-co/graphql.web": "^1.0.1",
|
||||
"wonka": "^6.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@urql/svelte": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@urql/svelte/-/svelte-4.0.1.tgz",
|
||||
"integrity": "sha512-WbsVjuK7IUNlJlvXAgevjQunoso0T+AngFlb0zafDvay6HN47Zc3CSVbAlP8KjETjERUMJLuiqknmPFFm2GEFQ==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@urql/svelte/-/svelte-4.0.4.tgz",
|
||||
"integrity": "sha512-HYz9dHdqEcs9d82WWczQ3XG+zuup3TS01H+txaij/QfQ+KHjrlrn0EkOHQQd1S+H8+nFjFU2x9+HE3+3fuwL1A==",
|
||||
"dependencies": {
|
||||
"@urql/core": "^4.0.0",
|
||||
"@urql/core": "^4.1.0",
|
||||
"wonka": "^6.3.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": "^3.0.0"
|
||||
"svelte": "^3.0.0 || ^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn": {
|
||||
"version": "8.8.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz",
|
||||
"integrity": "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==",
|
||||
"version": "8.10.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
|
||||
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
@ -304,6 +302,17 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/chart.js": {
|
||||
"version": "4.3.3",
|
||||
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.3.3.tgz",
|
||||
"integrity": "sha512-aTk7pBw+x6sQYhon/NR3ikfUJuym/LdgpTlgZRe2PaEhjUMKBKyNaFCMVRAyTEWYFNO7qRu7iQVqOw/OqzxZxQ==",
|
||||
"dependencies": {
|
||||
"@kurkle/color": "^0.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"pnpm": ">=7"
|
||||
}
|
||||
},
|
||||
"node_modules/commander": {
|
||||
"version": "2.20.3",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
|
||||
@ -313,13 +322,13 @@
|
||||
"node_modules/commondir": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
|
||||
"integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
|
||||
"integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/deepmerge": {
|
||||
"version": "4.2.2",
|
||||
"resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz",
|
||||
"integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==",
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz",
|
||||
"integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
@ -337,9 +346,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
@ -376,9 +385,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/graphql": {
|
||||
"version": "16.6.0",
|
||||
"resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz",
|
||||
"integrity": "sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw==",
|
||||
"version": "16.8.0",
|
||||
"resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.0.tgz",
|
||||
"integrity": "sha512-0oKGaR+y3qcS5mCu1vb7KG+a89vjn06C7Ihq/dDl3jA+A8B3TKomvi3CiEcVLJQGalbu8F52LxkOym7U5sSfbg==",
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0"
|
||||
}
|
||||
@ -427,9 +436,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/is-core-module": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.0.tgz",
|
||||
"integrity": "sha512-RECHCBCd/viahWmwj6enj19sKbHfJrddi/6cBDsNTKbNq0f7VeaUkBo60BqzvPqo/W54ChS62Z5qyun7cfOMqQ==",
|
||||
"version": "2.13.0",
|
||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz",
|
||||
"integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"has": "^1.0.3"
|
||||
@ -441,7 +450,7 @@
|
||||
"node_modules/is-module": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz",
|
||||
"integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==",
|
||||
"integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/is-reference": {
|
||||
@ -512,12 +521,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/resolve": {
|
||||
"version": "1.22.2",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz",
|
||||
"integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==",
|
||||
"version": "1.22.4",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz",
|
||||
"integrity": "sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-core-module": "^2.11.0",
|
||||
"is-core-module": "^2.13.0",
|
||||
"path-parse": "^1.0.7",
|
||||
"supports-preserve-symlinks-flag": "^1.0.0"
|
||||
},
|
||||
@ -538,9 +547,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/rollup": {
|
||||
"version": "3.21.0",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-3.21.0.tgz",
|
||||
"integrity": "sha512-ANPhVcyeHvYdQMUyCbczy33nbLzI7RzrBje4uvNiTDJGIMtlKoOStmympwr9OtS1LZxiDmE2wvxHyVhoLtf1KQ==",
|
||||
"version": "3.28.1",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-3.28.1.tgz",
|
||||
"integrity": "sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==",
|
||||
"devOptional": true,
|
||||
"bin": {
|
||||
"rollup": "dist/bin/rollup"
|
||||
@ -569,9 +578,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/rollup-plugin-svelte": {
|
||||
"version": "7.1.4",
|
||||
"resolved": "https://registry.npmjs.org/rollup-plugin-svelte/-/rollup-plugin-svelte-7.1.4.tgz",
|
||||
"integrity": "sha512-Jm0FCydR7k8bBGe7wimXAes8x2zEK10Ew3f3lEZwYor/Zya3X0AZVeSAPRH7yiXB9hWQVzJu597EUeNwGDTdjQ==",
|
||||
"version": "7.1.6",
|
||||
"resolved": "https://registry.npmjs.org/rollup-plugin-svelte/-/rollup-plugin-svelte-7.1.6.tgz",
|
||||
"integrity": "sha512-nVFRBpGWI2qUY1OcSiEEA/kjCY2+vAjO9BI8SzA7NRrh2GTunLd6w2EYmnMt/atgdg8GvcNjLsmZmbQs/u4SQA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@rollup/pluginutils": "^4.1.0",
|
||||
@ -618,10 +627,19 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"node_modules/serialize-javascript": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz",
|
||||
"integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"randombytes": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/smob": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/smob/-/smob-0.0.6.tgz",
|
||||
"integrity": "sha512-V21+XeNni+tTyiST1MHsa84AQhT1aFZipzPpOFAVB8DkHzwJyjjAmt9bgwnuZiZWnIbMo2duE29wybxv/7HWUw==",
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/smob/-/smob-1.4.0.tgz",
|
||||
"integrity": "sha512-MqR3fVulhjWuRNSMydnTlweu38UhQ0HXM4buStD/S3mc/BzX3CuM9OmhyQpmtYCvoYdl5ris6TI0ZqH355Ymqg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
@ -656,32 +674,41 @@
|
||||
}
|
||||
},
|
||||
"node_modules/svelte": {
|
||||
"version": "3.58.0",
|
||||
"resolved": "https://registry.npmjs.org/svelte/-/svelte-3.58.0.tgz",
|
||||
"integrity": "sha512-brIBNNB76mXFmU/Kerm4wFnkskBbluBDCjx/8TcpYRb298Yh2dztS2kQ6bhtjMcvUhd5ynClfwpz5h2gnzdQ1A==",
|
||||
"version": "3.59.2",
|
||||
"resolved": "https://registry.npmjs.org/svelte/-/svelte-3.59.2.tgz",
|
||||
"integrity": "sha512-vzSyuGr3eEoAtT/A6bmajosJZIUWySzY2CzB3w2pgPvnkUjGqlDnsNnA0PMO+mMAhuyMul6C2uuZzY6ELSkzyA==",
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/svelte-chartjs": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/svelte-chartjs/-/svelte-chartjs-3.1.2.tgz",
|
||||
"integrity": "sha512-3+6gY2IJ9Ua8R9pk3iS1ypa7Z9OoXCJb9oPwIfTp7caJM+X+RrWnH2CTkGAq7FeSxc2nnmW08tYN88Q8Y+5M+w==",
|
||||
"peerDependencies": {
|
||||
"chart.js": "^3.5.0 || ^4.0.0",
|
||||
"svelte": "^3.45.0"
|
||||
}
|
||||
},
|
||||
"node_modules/sveltestrap": {
|
||||
"version": "5.10.0",
|
||||
"resolved": "https://registry.npmjs.org/sveltestrap/-/sveltestrap-5.10.0.tgz",
|
||||
"integrity": "sha512-k6Ob+6G2AMYvBidXHBKM9W28fJqFHbmosqCe/NC8pv6TV7K+v47Yw+zmnLWkjqCzzmjkSLkL48SrHZrlWc9mYQ==",
|
||||
"version": "5.11.1",
|
||||
"resolved": "https://registry.npmjs.org/sveltestrap/-/sveltestrap-5.11.1.tgz",
|
||||
"integrity": "sha512-FIvPIEU1VolqMN1wi2XrC8aehWVbIJEST7zPfPbOUUfPimyx9giN4nA3We5wkXrBUaifXA8CSIwuHFvf3CmYQw==",
|
||||
"dependencies": {
|
||||
"@popperjs/core": "^2.9.2"
|
||||
"@popperjs/core": "^2.11.8"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": "^3.29.0"
|
||||
"svelte": "^3.53.1"
|
||||
}
|
||||
},
|
||||
"node_modules/terser": {
|
||||
"version": "5.17.1",
|
||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.17.1.tgz",
|
||||
"integrity": "sha512-hVl35zClmpisy6oaoKALOpS0rDYLxRFLHhRuDlEGTKey9qHjS1w9GMORjuwIMt70Wan4lwsLYyWDVnWgF+KUEw==",
|
||||
"version": "5.19.2",
|
||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.19.2.tgz",
|
||||
"integrity": "sha512-qC5+dmecKJA4cpYxRa5aVkKehYsQKc+AHeKl0Oe62aYjBL8ZA33tTljktDHJSaxxMnbI5ZYw+o/S2DxxLu8OfA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/source-map": "^0.3.2",
|
||||
"acorn": "^8.5.0",
|
||||
"@jridgewell/source-map": "^0.3.3",
|
||||
"acorn": "^8.8.2",
|
||||
"commander": "^2.20.0",
|
||||
"source-map-support": "~0.5.20"
|
||||
},
|
||||
@ -693,14 +720,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/uplot": {
|
||||
"version": "1.6.24",
|
||||
"resolved": "https://registry.npmjs.org/uplot/-/uplot-1.6.24.tgz",
|
||||
"integrity": "sha512-WpH2BsrFrqxkMu+4XBvc0eCDsRBhzoq9crttYeSI0bfxpzR5YoSVzZXOKFVWcVC7sp/aDXrdDPbDZGCtck2PVg=="
|
||||
"version": "1.6.25",
|
||||
"resolved": "https://registry.npmjs.org/uplot/-/uplot-1.6.25.tgz",
|
||||
"integrity": "sha512-eWLAhEaGtIcVBiS67mC2UC0yV+G6eYLS2rU67N4F2JVWjt7uBMg4xKXUYGW0dEz9G+m7fNatjCVXHts4gjyuMQ=="
|
||||
},
|
||||
"node_modules/wonka": {
|
||||
"version": "6.3.2",
|
||||
"resolved": "https://registry.npmjs.org/wonka/-/wonka-6.3.2.tgz",
|
||||
"integrity": "sha512-2xXbQ1LnwNS7egVm1HPhW2FyKrekolzhpM3mCwXdQr55gO+tAiY76rhb32OL9kKsW8taj++iP7C6hxlVzbnvrw=="
|
||||
"version": "6.3.4",
|
||||
"resolved": "https://registry.npmjs.org/wonka/-/wonka-6.3.4.tgz",
|
||||
"integrity": "sha512-CjpbqNtBGNAeyNS/9W6q3kSkKE52+FjIj7AkFlLr11s/VWGUu6a2CdYSdGxocIhIVjaW/zchesBQUKPVU69Cqg=="
|
||||
},
|
||||
"node_modules/wrappy": {
|
||||
"version": "1.0.2",
|
||||
|
@ -18,8 +18,10 @@
"dependencies": {
"@rollup/plugin-replace": "^5.0.2",
"@urql/svelte": "^4.0.1",
"chart.js": "^4.3.3",
"graphql": "^16.6.0",
"sveltestrap": "^5.10.0",
"svelte-chartjs": "^3.1.2",
"sveltestrap": "^5.11.1",
"uplot": "^1.6.24",
"wonka": "^6.3.2"
}
311 web/frontend/public/bootstrap-icons.css vendored
@ -1,7 +1,14 @@
|
||||
/*!
|
||||
* Bootstrap Icons v1.10.5 (https://icons.getbootstrap.com/)
|
||||
* Copyright 2019-2023 The Bootstrap Authors
|
||||
* Licensed under MIT (https://github.com/twbs/icons/blob/main/LICENSE)
|
||||
*/
|
||||
|
||||
@font-face {
|
||||
font-display: block;
|
||||
font-family: "bootstrap-icons";
|
||||
src: url("./fonts/bootstrap-icons.woff2?524846017b983fc8ded9325d94ed40f3") format("woff2"),
|
||||
url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("woff");
|
||||
src: url("./fonts/bootstrap-icons.woff2?1fa40e8900654d2863d011707b9fb6f2") format("woff2"),
|
||||
url("./fonts/bootstrap-icons.woff?1fa40e8900654d2863d011707b9fb6f2") format("woff");
|
||||
}
|
||||
|
||||
.bi::before,
|
||||
@ -440,7 +447,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-cloud-fog2::before { content: "\f2a2"; }
|
||||
.bi-cloud-hail-fill::before { content: "\f2a3"; }
|
||||
.bi-cloud-hail::before { content: "\f2a4"; }
|
||||
.bi-cloud-haze-1::before { content: "\f2a5"; }
|
||||
.bi-cloud-haze-fill::before { content: "\f2a6"; }
|
||||
.bi-cloud-haze::before { content: "\f2a7"; }
|
||||
.bi-cloud-haze2-fill::before { content: "\f2a8"; }
|
||||
@ -1436,21 +1442,16 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-dpad::before { content: "\f687"; }
|
||||
.bi-ear-fill::before { content: "\f688"; }
|
||||
.bi-ear::before { content: "\f689"; }
|
||||
.bi-envelope-check-1::before { content: "\f68a"; }
|
||||
.bi-envelope-check-fill::before { content: "\f68b"; }
|
||||
.bi-envelope-check::before { content: "\f68c"; }
|
||||
.bi-envelope-dash-1::before { content: "\f68d"; }
|
||||
.bi-envelope-dash-fill::before { content: "\f68e"; }
|
||||
.bi-envelope-dash::before { content: "\f68f"; }
|
||||
.bi-envelope-exclamation-1::before { content: "\f690"; }
|
||||
.bi-envelope-exclamation-fill::before { content: "\f691"; }
|
||||
.bi-envelope-exclamation::before { content: "\f692"; }
|
||||
.bi-envelope-plus-fill::before { content: "\f693"; }
|
||||
.bi-envelope-plus::before { content: "\f694"; }
|
||||
.bi-envelope-slash-1::before { content: "\f695"; }
|
||||
.bi-envelope-slash-fill::before { content: "\f696"; }
|
||||
.bi-envelope-slash::before { content: "\f697"; }
|
||||
.bi-envelope-x-1::before { content: "\f698"; }
|
||||
.bi-envelope-x-fill::before { content: "\f699"; }
|
||||
.bi-envelope-x::before { content: "\f69a"; }
|
||||
.bi-explicit-fill::before { content: "\f69b"; }
|
||||
@ -1460,8 +1461,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-list-columns-reverse::before { content: "\f69f"; }
|
||||
.bi-list-columns::before { content: "\f6a0"; }
|
||||
.bi-meta::before { content: "\f6a1"; }
|
||||
.bi-mortorboard-fill::before { content: "\f6a2"; }
|
||||
.bi-mortorboard::before { content: "\f6a3"; }
|
||||
.bi-nintendo-switch::before { content: "\f6a4"; }
|
||||
.bi-pc-display-horizontal::before { content: "\f6a5"; }
|
||||
.bi-pc-display::before { content: "\f6a6"; }
|
||||
@ -1480,7 +1479,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-send-check::before { content: "\f6b3"; }
|
||||
.bi-send-dash-fill::before { content: "\f6b4"; }
|
||||
.bi-send-dash::before { content: "\f6b5"; }
|
||||
.bi-send-exclamation-1::before { content: "\f6b6"; }
|
||||
.bi-send-exclamation-fill::before { content: "\f6b7"; }
|
||||
.bi-send-exclamation::before { content: "\f6b8"; }
|
||||
.bi-send-fill::before { content: "\f6b9"; }
|
||||
@ -1492,7 +1490,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-send-x::before { content: "\f6bf"; }
|
||||
.bi-send::before { content: "\f6c0"; }
|
||||
.bi-steam::before { content: "\f6c1"; }
|
||||
.bi-terminal-dash-1::before { content: "\f6c2"; }
|
||||
.bi-terminal-dash::before { content: "\f6c3"; }
|
||||
.bi-terminal-plus::before { content: "\f6c4"; }
|
||||
.bi-terminal-split::before { content: "\f6c5"; }
|
||||
@ -1522,7 +1519,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-usb-symbol::before { content: "\f6dd"; }
|
||||
.bi-usb::before { content: "\f6de"; }
|
||||
.bi-boombox-fill::before { content: "\f6df"; }
|
||||
.bi-displayport-1::before { content: "\f6e0"; }
|
||||
.bi-displayport::before { content: "\f6e1"; }
|
||||
.bi-gpu-card::before { content: "\f6e2"; }
|
||||
.bi-memory::before { content: "\f6e3"; }
|
||||
@ -1535,8 +1531,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-pci-card::before { content: "\f6ea"; }
|
||||
.bi-router-fill::before { content: "\f6eb"; }
|
||||
.bi-router::before { content: "\f6ec"; }
|
||||
.bi-ssd-fill::before { content: "\f6ed"; }
|
||||
.bi-ssd::before { content: "\f6ee"; }
|
||||
.bi-thunderbolt-fill::before { content: "\f6ef"; }
|
||||
.bi-thunderbolt::before { content: "\f6f0"; }
|
||||
.bi-usb-drive-fill::before { content: "\f6f1"; }
|
||||
@ -1643,7 +1637,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-filetype-pdf::before { content: "\f756"; }
|
||||
.bi-filetype-php::before { content: "\f757"; }
|
||||
.bi-filetype-png::before { content: "\f758"; }
|
||||
.bi-filetype-ppt-1::before { content: "\f759"; }
|
||||
.bi-filetype-ppt::before { content: "\f75a"; }
|
||||
.bi-filetype-psd::before { content: "\f75b"; }
|
||||
.bi-filetype-py::before { content: "\f75c"; }
|
||||
@ -1659,7 +1652,6 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-filetype-txt::before { content: "\f766"; }
|
||||
.bi-filetype-wav::before { content: "\f767"; }
|
||||
.bi-filetype-woff::before { content: "\f768"; }
|
||||
.bi-filetype-xls-1::before { content: "\f769"; }
|
||||
.bi-filetype-xls::before { content: "\f76a"; }
|
||||
.bi-filetype-xml::before { content: "\f76b"; }
|
||||
.bi-filetype-yml::before { content: "\f76c"; }
|
||||
@ -1702,3 +1694,288 @@ url("./fonts/bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("wof
|
||||
.bi-filetype-json::before { content: "\f791"; }
|
||||
.bi-filetype-pptx::before { content: "\f792"; }
|
||||
.bi-filetype-xlsx::before { content: "\f793"; }
|
||||
.bi-1-circle-fill::before { content: "\f796"; }
|
||||
.bi-1-circle::before { content: "\f797"; }
|
||||
.bi-1-square-fill::before { content: "\f798"; }
|
||||
.bi-1-square::before { content: "\f799"; }
|
||||
.bi-2-circle-fill::before { content: "\f79c"; }
|
||||
.bi-2-circle::before { content: "\f79d"; }
|
||||
.bi-2-square-fill::before { content: "\f79e"; }
|
||||
.bi-2-square::before { content: "\f79f"; }
|
||||
.bi-3-circle-fill::before { content: "\f7a2"; }
|
||||
.bi-3-circle::before { content: "\f7a3"; }
|
||||
.bi-3-square-fill::before { content: "\f7a4"; }
|
||||
.bi-3-square::before { content: "\f7a5"; }
|
||||
.bi-4-circle-fill::before { content: "\f7a8"; }
|
||||
.bi-4-circle::before { content: "\f7a9"; }
|
||||
.bi-4-square-fill::before { content: "\f7aa"; }
|
||||
.bi-4-square::before { content: "\f7ab"; }
|
||||
.bi-5-circle-fill::before { content: "\f7ae"; }
|
||||
.bi-5-circle::before { content: "\f7af"; }
|
||||
.bi-5-square-fill::before { content: "\f7b0"; }
|
||||
.bi-5-square::before { content: "\f7b1"; }
|
||||
.bi-6-circle-fill::before { content: "\f7b4"; }
|
||||
.bi-6-circle::before { content: "\f7b5"; }
|
||||
.bi-6-square-fill::before { content: "\f7b6"; }
|
||||
.bi-6-square::before { content: "\f7b7"; }
|
||||
.bi-7-circle-fill::before { content: "\f7ba"; }
|
||||
.bi-7-circle::before { content: "\f7bb"; }
|
||||
.bi-7-square-fill::before { content: "\f7bc"; }
|
||||
.bi-7-square::before { content: "\f7bd"; }
|
||||
.bi-8-circle-fill::before { content: "\f7c0"; }
|
||||
.bi-8-circle::before { content: "\f7c1"; }
|
||||
.bi-8-square-fill::before { content: "\f7c2"; }
|
||||
.bi-8-square::before { content: "\f7c3"; }
|
||||
.bi-9-circle-fill::before { content: "\f7c6"; }
|
||||
.bi-9-circle::before { content: "\f7c7"; }
|
||||
.bi-9-square-fill::before { content: "\f7c8"; }
|
||||
.bi-9-square::before { content: "\f7c9"; }
|
||||
.bi-airplane-engines-fill::before { content: "\f7ca"; }
|
||||
.bi-airplane-engines::before { content: "\f7cb"; }
|
||||
.bi-airplane-fill::before { content: "\f7cc"; }
|
||||
.bi-airplane::before { content: "\f7cd"; }
|
||||
.bi-alexa::before { content: "\f7ce"; }
|
||||
.bi-alipay::before { content: "\f7cf"; }
|
||||
.bi-android::before { content: "\f7d0"; }
|
||||
.bi-android2::before { content: "\f7d1"; }
|
||||
.bi-box-fill::before { content: "\f7d2"; }
|
||||
.bi-box-seam-fill::before { content: "\f7d3"; }
|
||||
.bi-browser-chrome::before { content: "\f7d4"; }
|
||||
.bi-browser-edge::before { content: "\f7d5"; }
|
||||
.bi-browser-firefox::before { content: "\f7d6"; }
|
||||
.bi-browser-safari::before { content: "\f7d7"; }
|
||||
.bi-c-circle-fill::before { content: "\f7da"; }
|
||||
.bi-c-circle::before { content: "\f7db"; }
|
||||
.bi-c-square-fill::before { content: "\f7dc"; }
|
||||
.bi-c-square::before { content: "\f7dd"; }
|
||||
.bi-capsule-pill::before { content: "\f7de"; }
|
||||
.bi-capsule::before { content: "\f7df"; }
|
||||
.bi-car-front-fill::before { content: "\f7e0"; }
|
||||
.bi-car-front::before { content: "\f7e1"; }
|
||||
.bi-cassette-fill::before { content: "\f7e2"; }
|
||||
.bi-cassette::before { content: "\f7e3"; }
|
||||
.bi-cc-circle-fill::before { content: "\f7e6"; }
|
||||
.bi-cc-circle::before { content: "\f7e7"; }
|
||||
.bi-cc-square-fill::before { content: "\f7e8"; }
|
||||
.bi-cc-square::before { content: "\f7e9"; }
|
||||
.bi-cup-hot-fill::before { content: "\f7ea"; }
|
||||
.bi-cup-hot::before { content: "\f7eb"; }
|
||||
.bi-currency-rupee::before { content: "\f7ec"; }
|
||||
.bi-dropbox::before { content: "\f7ed"; }
|
||||
.bi-escape::before { content: "\f7ee"; }
|
||||
.bi-fast-forward-btn-fill::before { content: "\f7ef"; }
|
||||
.bi-fast-forward-btn::before { content: "\f7f0"; }
|
||||
.bi-fast-forward-circle-fill::before { content: "\f7f1"; }
|
||||
.bi-fast-forward-circle::before { content: "\f7f2"; }
|
||||
.bi-fast-forward-fill::before { content: "\f7f3"; }
|
||||
.bi-fast-forward::before { content: "\f7f4"; }
|
||||
.bi-filetype-sql::before { content: "\f7f5"; }
|
||||
.bi-fire::before { content: "\f7f6"; }
|
||||
.bi-google-play::before { content: "\f7f7"; }
|
||||
.bi-h-circle-fill::before { content: "\f7fa"; }
|
||||
.bi-h-circle::before { content: "\f7fb"; }
|
||||
.bi-h-square-fill::before { content: "\f7fc"; }
|
||||
.bi-h-square::before { content: "\f7fd"; }
|
||||
.bi-indent::before { content: "\f7fe"; }
|
||||
.bi-lungs-fill::before { content: "\f7ff"; }
|
||||
.bi-lungs::before { content: "\f800"; }
|
||||
.bi-microsoft-teams::before { content: "\f801"; }
|
||||
.bi-p-circle-fill::before { content: "\f804"; }
|
||||
.bi-p-circle::before { content: "\f805"; }
|
||||
.bi-p-square-fill::before { content: "\f806"; }
|
||||
.bi-p-square::before { content: "\f807"; }
|
||||
.bi-pass-fill::before { content: "\f808"; }
|
||||
.bi-pass::before { content: "\f809"; }
|
||||
.bi-prescription::before { content: "\f80a"; }
|
||||
.bi-prescription2::before { content: "\f80b"; }
|
||||
.bi-r-circle-fill::before { content: "\f80e"; }
|
||||
.bi-r-circle::before { content: "\f80f"; }
|
||||
.bi-r-square-fill::before { content: "\f810"; }
|
||||
.bi-r-square::before { content: "\f811"; }
|
||||
.bi-repeat-1::before { content: "\f812"; }
|
||||
.bi-repeat::before { content: "\f813"; }
|
||||
.bi-rewind-btn-fill::before { content: "\f814"; }
|
||||
.bi-rewind-btn::before { content: "\f815"; }
|
||||
.bi-rewind-circle-fill::before { content: "\f816"; }
|
||||
.bi-rewind-circle::before { content: "\f817"; }
|
||||
.bi-rewind-fill::before { content: "\f818"; }
|
||||
.bi-rewind::before { content: "\f819"; }
|
||||
.bi-train-freight-front-fill::before { content: "\f81a"; }
|
||||
.bi-train-freight-front::before { content: "\f81b"; }
|
||||
.bi-train-front-fill::before { content: "\f81c"; }
|
||||
.bi-train-front::before { content: "\f81d"; }
|
||||
.bi-train-lightrail-front-fill::before { content: "\f81e"; }
|
||||
.bi-train-lightrail-front::before { content: "\f81f"; }
|
||||
.bi-truck-front-fill::before { content: "\f820"; }
|
||||
.bi-truck-front::before { content: "\f821"; }
|
||||
.bi-ubuntu::before { content: "\f822"; }
|
||||
.bi-unindent::before { content: "\f823"; }
|
||||
.bi-unity::before { content: "\f824"; }
|
||||
.bi-universal-access-circle::before { content: "\f825"; }
|
||||
.bi-universal-access::before { content: "\f826"; }
|
||||
.bi-virus::before { content: "\f827"; }
|
||||
.bi-virus2::before { content: "\f828"; }
|
||||
.bi-wechat::before { content: "\f829"; }
|
||||
.bi-yelp::before { content: "\f82a"; }
|
||||
.bi-sign-stop-fill::before { content: "\f82b"; }
|
||||
.bi-sign-stop-lights-fill::before { content: "\f82c"; }
|
||||
.bi-sign-stop-lights::before { content: "\f82d"; }
|
||||
.bi-sign-stop::before { content: "\f82e"; }
|
||||
.bi-sign-turn-left-fill::before { content: "\f82f"; }
|
||||
.bi-sign-turn-left::before { content: "\f830"; }
|
||||
.bi-sign-turn-right-fill::before { content: "\f831"; }
|
||||
.bi-sign-turn-right::before { content: "\f832"; }
|
||||
.bi-sign-turn-slight-left-fill::before { content: "\f833"; }
|
||||
.bi-sign-turn-slight-left::before { content: "\f834"; }
|
||||
.bi-sign-turn-slight-right-fill::before { content: "\f835"; }
|
||||
.bi-sign-turn-slight-right::before { content: "\f836"; }
|
||||
.bi-sign-yield-fill::before { content: "\f837"; }
|
||||
.bi-sign-yield::before { content: "\f838"; }
|
||||
.bi-ev-station-fill::before { content: "\f839"; }
|
||||
.bi-ev-station::before { content: "\f83a"; }
|
||||
.bi-fuel-pump-diesel-fill::before { content: "\f83b"; }
|
||||
.bi-fuel-pump-diesel::before { content: "\f83c"; }
|
||||
.bi-fuel-pump-fill::before { content: "\f83d"; }
|
||||
.bi-fuel-pump::before { content: "\f83e"; }
|
||||
.bi-0-circle-fill::before { content: "\f83f"; }
|
||||
.bi-0-circle::before { content: "\f840"; }
|
||||
.bi-0-square-fill::before { content: "\f841"; }
|
||||
.bi-0-square::before { content: "\f842"; }
|
||||
.bi-rocket-fill::before { content: "\f843"; }
|
||||
.bi-rocket-takeoff-fill::before { content: "\f844"; }
|
||||
.bi-rocket-takeoff::before { content: "\f845"; }
|
||||
.bi-rocket::before { content: "\f846"; }
|
||||
.bi-stripe::before { content: "\f847"; }
|
||||
.bi-subscript::before { content: "\f848"; }
|
||||
.bi-superscript::before { content: "\f849"; }
|
||||
.bi-trello::before { content: "\f84a"; }
|
||||
.bi-envelope-at-fill::before { content: "\f84b"; }
|
||||
.bi-envelope-at::before { content: "\f84c"; }
|
||||
.bi-regex::before { content: "\f84d"; }
|
||||
.bi-text-wrap::before { content: "\f84e"; }
|
||||
.bi-sign-dead-end-fill::before { content: "\f84f"; }
|
||||
.bi-sign-dead-end::before { content: "\f850"; }
|
||||
.bi-sign-do-not-enter-fill::before { content: "\f851"; }
|
||||
.bi-sign-do-not-enter::before { content: "\f852"; }
|
||||
.bi-sign-intersection-fill::before { content: "\f853"; }
|
||||
.bi-sign-intersection-side-fill::before { content: "\f854"; }
|
||||
.bi-sign-intersection-side::before { content: "\f855"; }
|
||||
.bi-sign-intersection-t-fill::before { content: "\f856"; }
|
||||
.bi-sign-intersection-t::before { content: "\f857"; }
|
||||
.bi-sign-intersection-y-fill::before { content: "\f858"; }
|
||||
.bi-sign-intersection-y::before { content: "\f859"; }
|
||||
.bi-sign-intersection::before { content: "\f85a"; }
|
||||
.bi-sign-merge-left-fill::before { content: "\f85b"; }
|
||||
.bi-sign-merge-left::before { content: "\f85c"; }
|
||||
.bi-sign-merge-right-fill::before { content: "\f85d"; }
|
||||
.bi-sign-merge-right::before { content: "\f85e"; }
|
||||
.bi-sign-no-left-turn-fill::before { content: "\f85f"; }
|
||||
.bi-sign-no-left-turn::before { content: "\f860"; }
|
||||
.bi-sign-no-parking-fill::before { content: "\f861"; }
|
||||
.bi-sign-no-parking::before { content: "\f862"; }
|
||||
.bi-sign-no-right-turn-fill::before { content: "\f863"; }
|
||||
.bi-sign-no-right-turn::before { content: "\f864"; }
|
||||
.bi-sign-railroad-fill::before { content: "\f865"; }
|
||||
.bi-sign-railroad::before { content: "\f866"; }
|
||||
.bi-building-add::before { content: "\f867"; }
|
||||
.bi-building-check::before { content: "\f868"; }
|
||||
.bi-building-dash::before { content: "\f869"; }
|
||||
.bi-building-down::before { content: "\f86a"; }
|
||||
.bi-building-exclamation::before { content: "\f86b"; }
|
||||
.bi-building-fill-add::before { content: "\f86c"; }
|
||||
.bi-building-fill-check::before { content: "\f86d"; }
|
||||
.bi-building-fill-dash::before { content: "\f86e"; }
|
||||
.bi-building-fill-down::before { content: "\f86f"; }
|
||||
.bi-building-fill-exclamation::before { content: "\f870"; }
|
||||
.bi-building-fill-gear::before { content: "\f871"; }
|
||||
.bi-building-fill-lock::before { content: "\f872"; }
|
||||
.bi-building-fill-slash::before { content: "\f873"; }
|
||||
.bi-building-fill-up::before { content: "\f874"; }
|
||||
.bi-building-fill-x::before { content: "\f875"; }
|
||||
.bi-building-fill::before { content: "\f876"; }
|
||||
.bi-building-gear::before { content: "\f877"; }
|
||||
.bi-building-lock::before { content: "\f878"; }
|
||||
.bi-building-slash::before { content: "\f879"; }
|
||||
.bi-building-up::before { content: "\f87a"; }
|
||||
.bi-building-x::before { content: "\f87b"; }
|
||||
.bi-buildings-fill::before { content: "\f87c"; }
|
||||
.bi-buildings::before { content: "\f87d"; }
|
||||
.bi-bus-front-fill::before { content: "\f87e"; }
|
||||
.bi-bus-front::before { content: "\f87f"; }
|
||||
.bi-ev-front-fill::before { content: "\f880"; }
|
||||
.bi-ev-front::before { content: "\f881"; }
|
||||
.bi-globe-americas::before { content: "\f882"; }
|
||||
.bi-globe-asia-australia::before { content: "\f883"; }
|
||||
.bi-globe-central-south-asia::before { content: "\f884"; }
|
||||
.bi-globe-europe-africa::before { content: "\f885"; }
|
||||
.bi-house-add-fill::before { content: "\f886"; }
|
||||
.bi-house-add::before { content: "\f887"; }
|
||||
.bi-house-check-fill::before { content: "\f888"; }
|
||||
.bi-house-check::before { content: "\f889"; }
|
||||
.bi-house-dash-fill::before { content: "\f88a"; }
|
||||
.bi-house-dash::before { content: "\f88b"; }
|
||||
.bi-house-down-fill::before { content: "\f88c"; }
|
||||
.bi-house-down::before { content: "\f88d"; }
|
||||
.bi-house-exclamation-fill::before { content: "\f88e"; }
|
||||
.bi-house-exclamation::before { content: "\f88f"; }
|
||||
.bi-house-gear-fill::before { content: "\f890"; }
|
||||
.bi-house-gear::before { content: "\f891"; }
|
||||
.bi-house-lock-fill::before { content: "\f892"; }
|
||||
.bi-house-lock::before { content: "\f893"; }
|
||||
.bi-house-slash-fill::before { content: "\f894"; }
|
||||
.bi-house-slash::before { content: "\f895"; }
|
||||
.bi-house-up-fill::before { content: "\f896"; }
|
||||
.bi-house-up::before { content: "\f897"; }
|
||||
.bi-house-x-fill::before { content: "\f898"; }
|
||||
.bi-house-x::before { content: "\f899"; }
|
||||
.bi-person-add::before { content: "\f89a"; }
|
||||
.bi-person-down::before { content: "\f89b"; }
|
||||
.bi-person-exclamation::before { content: "\f89c"; }
|
||||
.bi-person-fill-add::before { content: "\f89d"; }
|
||||
.bi-person-fill-check::before { content: "\f89e"; }
|
||||
.bi-person-fill-dash::before { content: "\f89f"; }
|
||||
.bi-person-fill-down::before { content: "\f8a0"; }
|
||||
.bi-person-fill-exclamation::before { content: "\f8a1"; }
|
||||
.bi-person-fill-gear::before { content: "\f8a2"; }
|
||||
.bi-person-fill-lock::before { content: "\f8a3"; }
|
||||
.bi-person-fill-slash::before { content: "\f8a4"; }
|
||||
.bi-person-fill-up::before { content: "\f8a5"; }
|
||||
.bi-person-fill-x::before { content: "\f8a6"; }
|
||||
.bi-person-gear::before { content: "\f8a7"; }
|
||||
.bi-person-lock::before { content: "\f8a8"; }
|
||||
.bi-person-slash::before { content: "\f8a9"; }
|
||||
.bi-person-up::before { content: "\f8aa"; }
|
||||
.bi-scooter::before { content: "\f8ab"; }
|
||||
.bi-taxi-front-fill::before { content: "\f8ac"; }
|
||||
.bi-taxi-front::before { content: "\f8ad"; }
|
||||
.bi-amd::before { content: "\f8ae"; }
|
||||
.bi-database-add::before { content: "\f8af"; }
|
||||
.bi-database-check::before { content: "\f8b0"; }
|
||||
.bi-database-dash::before { content: "\f8b1"; }
|
||||
.bi-database-down::before { content: "\f8b2"; }
|
||||
.bi-database-exclamation::before { content: "\f8b3"; }
|
||||
.bi-database-fill-add::before { content: "\f8b4"; }
|
||||
.bi-database-fill-check::before { content: "\f8b5"; }
|
||||
.bi-database-fill-dash::before { content: "\f8b6"; }
|
||||
.bi-database-fill-down::before { content: "\f8b7"; }
|
||||
.bi-database-fill-exclamation::before { content: "\f8b8"; }
|
||||
.bi-database-fill-gear::before { content: "\f8b9"; }
|
||||
.bi-database-fill-lock::before { content: "\f8ba"; }
|
||||
.bi-database-fill-slash::before { content: "\f8bb"; }
|
||||
.bi-database-fill-up::before { content: "\f8bc"; }
|
||||
.bi-database-fill-x::before { content: "\f8bd"; }
|
||||
.bi-database-fill::before { content: "\f8be"; }
|
||||
.bi-database-gear::before { content: "\f8bf"; }
|
||||
.bi-database-lock::before { content: "\f8c0"; }
|
||||
.bi-database-slash::before { content: "\f8c1"; }
|
||||
.bi-database-up::before { content: "\f8c2"; }
|
||||
.bi-database-x::before { content: "\f8c3"; }
|
||||
.bi-database::before { content: "\f8c4"; }
|
||||
.bi-houses-fill::before { content: "\f8c5"; }
|
||||
.bi-houses::before { content: "\f8c6"; }
|
||||
.bi-nvidia::before { content: "\f8c7"; }
|
||||
.bi-person-vcard-fill::before { content: "\f8c8"; }
|
||||
.bi-person-vcard::before { content: "\f8c9"; }
|
||||
.bi-sina-weibo::before { content: "\f8ca"; }
|
||||
.bi-tencent-qq::before { content: "\f8cb"; }
|
||||
.bi-wikipedia::before { content: "\f8cc"; }
|
||||
|
7 web/frontend/public/bootstrap.min.css vendored
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@ -1,11 +1,13 @@
<script>
import { init } from './utils.js'
import { init, convert2uplot } from './utils.js'
import { getContext, onMount } from 'svelte'
import { queryStore, gql, getContextClient } from '@urql/svelte'
import { Row, Col, Spinner, Card, Table } from 'sveltestrap'
import { queryStore, gql, getContextClient, mutationStore } from '@urql/svelte'
import { Row, Col, Spinner, Card, Table, Icon } from 'sveltestrap'
import Filters from './filters/Filters.svelte'
import PlotSelection from './PlotSelection.svelte'
import Histogram, { binsFromFootprint } from './plots/Histogram.svelte'
import Histogram from './plots/Histogram.svelte'
import Pie, { colors } from './plots/Pie.svelte'
import { binsFromFootprint } from './utils.js'
import ScatterPlot from './plots/Scatter.svelte'
import PlotTable from './PlotTable.svelte'
import Roofline from './plots/Roofline.svelte'
@ -29,7 +31,7 @@
let filterComponent; // see why here: https://stackoverflow.com/questions/58287729/how-can-i-export-a-function-from-a-svelte-component-that-changes-a-value-in-the
let jobFilters = [];
let rooflineMaxY;
let colWidth;
let colWidth1, colWidth2, colWidth3, colWidth4;
let numBins = 50;
let maxY = -1;
const ccconfig = getContext('cc-config')
@ -40,6 +42,20 @@

$: metrics = [...new Set([...metricsInHistograms, ...metricsInScatterplots.flat()])]

const sortOptions = [
{key: 'totalWalltime', label: 'Walltime'},
{key: 'totalNodeHours', label: 'Node Hours'},
{key: 'totalCoreHours', label: 'Core Hours'},
{key: 'totalAccHours', label: 'Accelerator Hours'}
]
const groupOptions = [
{key: 'user', label: 'User Name'},
{key: 'project', label: 'Project ID'}
]

let sortSelection = sortOptions.find((option) => option.key == ccconfig[`analysis_view_selectedTopCategory:${filterPresets.cluster}`]) || sortOptions.find((option) => option.key == ccconfig.analysis_view_selectedTopCategory)
let groupSelection = groupOptions.find((option) => option.key == ccconfig[`analysis_view_selectedTopEntity:${filterPresets.cluster}`]) || groupOptions.find((option) => option.key == ccconfig.analysis_view_selectedTopEntity)

getContext('on-init')(({ data }) => {
if (data != null) {
cluster = data.clusters.find(c => c.name == filterPresets.cluster)
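The two selections above are restored from a per-cluster user setting with a global fallback, and their keys are upper-cased into the GraphQL variables of the top-list query in the next hunk. A minimal sketch of that resolution, using hypothetical config values (the real keys come from the `cc-config` context):

// Sketch only: illustrates the fallback order used above, with made-up config contents.
const ccconfig = {
  'analysis_view_selectedTopCategory:fritz': 'totalCoreHours', // per-cluster override (hypothetical)
  analysis_view_selectedTopCategory: 'totalWalltime'           // global default (hypothetical)
}
const cluster = 'fritz' // hypothetical cluster name
const sortOptions = [
  { key: 'totalWalltime', label: 'Walltime' },
  { key: 'totalCoreHours', label: 'Core Hours' }
]

// Per-cluster key wins, otherwise fall back to the global key.
const sortSelection =
  sortOptions.find((o) => o.key == ccconfig[`analysis_view_selectedTopCategory:${cluster}`]) ||
  sortOptions.find((o) => o.key == ccconfig.analysis_view_selectedTopCategory)

// The GraphQL variable is simply the upper-cased option key,
// e.g. 'totalCoreHours' -> 'TOTALCOREHOURS' for $sortBy.
console.log(sortSelection.key.toUpperCase())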
@ -60,23 +76,39 @@
totalJobs
shortJobs
totalWalltime
totalNodeHours
totalCoreHours
totalAccHours
histDuration { count, value }
histNumNodes { count, value }
histNumCores { count, value }
}

topUsers: jobsCount(filter: $jobFilters, groupBy: USER, weight: NODE_HOURS, limit: 5) { name, count }
}
`,
variables: { jobFilters }
})

$: topQuery = queryStore({
client: client,
query: gql`
query($jobFilters: [JobFilter!]!, $paging: PageRequest!, $sortBy: SortByAggregate!, $groupBy: Aggregate!) {
topList: jobsStatistics(filter: $jobFilters, page: $paging, sortBy: $sortBy, groupBy: $groupBy) {
id
totalWalltime
totalNodeHours
totalCoreHours
totalAccHours
}
}
`,
variables: { jobFilters, paging: { itemsPerPage: 10, page: 1 }, sortBy: sortSelection.key.toUpperCase(), groupBy: groupSelection.key.toUpperCase() }
})

$: footprintsQuery = queryStore({
client: client,
query: gql`
query($jobFilters: [JobFilter!]!, $metrics: [String!]!) {
footprints: jobsFootprints(filter: $jobFilters, metrics: $metrics) {
nodehours,
timeWeights { nodeHours, accHours, coreHours },
metrics { metric, data }
}
}`,
@ -95,6 +127,53 @@
|
||||
variables: { jobFilters, rows: 50, cols: 50, minX: 0.01, minY: 1., maxX: 1000., maxY }
|
||||
})
|
||||
|
||||
const updateConfigurationMutation = ({ name, value }) => {
|
||||
return mutationStore({
|
||||
client: client,
|
||||
query: gql`
|
||||
mutation ($name: String!, $value: String!) {
|
||||
updateConfiguration(name: $name, value: $value)
|
||||
}
|
||||
`,
|
||||
variables: { name, value }
|
||||
});
|
||||
}
|
||||
|
||||
function updateEntityConfiguration(select) {
|
||||
if (ccconfig[`analysis_view_selectedTopEntity:${filterPresets.cluster}`] != select) {
|
||||
updateConfigurationMutation({ name: `analysis_view_selectedTopEntity:${filterPresets.cluster}`, value: JSON.stringify(select) })
|
||||
.subscribe(res => {
|
||||
if (res.fetching === false && !res.error) {
|
||||
// console.log(`analysis_view_selectedTopEntity:${filterPresets.cluster}` + ' -> Updated!')
|
||||
} else if (res.fetching === false && res.error) {
|
||||
throw res.error
|
||||
}
|
||||
})
|
||||
} else {
|
||||
// console.log('No Mutation Required: Entity')
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
function updateCategoryConfiguration(select) {
|
||||
if (ccconfig[`analysis_view_selectedTopCategory:${filterPresets.cluster}`] != select) {
|
||||
updateConfigurationMutation({ name: `analysis_view_selectedTopCategory:${filterPresets.cluster}`, value: JSON.stringify(select) })
|
||||
.subscribe(res => {
|
||||
if (res.fetching === false && !res.error) {
|
||||
// console.log(`analysis_view_selectedTopCategory:${filterPresets.cluster}` + ' -> Updated!')
|
||||
} else if (res.fetching === false && res.error) {
|
||||
throw res.error
|
||||
}
|
||||
})
|
||||
} else {
|
||||
// console.log('No Mutation Required: Category')
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
$: updateEntityConfiguration(groupSelection.key)
|
||||
$: updateCategoryConfiguration(sortSelection.key)
|
||||
|
||||
onMount(() => filterComponent.update())
|
||||
</script>
|
||||
|
||||
@ -134,78 +213,150 @@
|
||||
</Col>
|
||||
</Row>
|
||||
{:else if $statsQuery.data}
|
||||
<Row>
|
||||
<div class="col-3" bind:clientWidth={colWidth}>
|
||||
<div style="height: 40%">
|
||||
<Table>
|
||||
<tr>
|
||||
<th scope="col">Total Jobs</th>
|
||||
<td>{$statsQuery.data.stats[0].totalJobs}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Short Jobs</th>
|
||||
<td>{$statsQuery.data.stats[0].shortJobs}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Walltime</th>
|
||||
<td>{$statsQuery.data.stats[0].totalWalltime}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Core Hours</th>
|
||||
<td>{$statsQuery.data.stats[0].totalCoreHours}</td>
|
||||
</tr>
|
||||
</Table>
|
||||
</div>
|
||||
<div style="height: 60%;">
|
||||
{#key $statsQuery.data.topUsers}
|
||||
<h4>Top Users (by node hours)</h4>
|
||||
<Histogram
|
||||
width={colWidth - 25} height={300 * 0.5} small={true}
|
||||
data={$statsQuery.data.topUsers.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
|
||||
label={(x) => x < $statsQuery.data.topUsers.length ? $statsQuery.data.topUsers[Math.floor(x)].name : 'No Users'}
|
||||
ylabel="Node Hours [h]"/>
|
||||
{/key}
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-3">
|
||||
{#key $statsQuery.data.stats[0].histDuration}
|
||||
<h4>Duration Distribution</h4>
|
||||
<Histogram
|
||||
width={colWidth - 25}
|
||||
data={$statsQuery.data.stats[0].histDuration}
|
||||
xlabel="Current Runtimes [h]"
|
||||
ylabel="Number of Jobs"/>
|
||||
<Row cols={3} class="mb-4">
|
||||
<Col>
|
||||
<Table>
|
||||
<tr>
|
||||
<th scope="col">Total Jobs</th>
|
||||
<td>{$statsQuery.data.stats[0].totalJobs}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Short Jobs</th>
|
||||
<td>{$statsQuery.data.stats[0].shortJobs}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Walltime</th>
|
||||
<td>{$statsQuery.data.stats[0].totalWalltime}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Node Hours</th>
|
||||
<td>{$statsQuery.data.stats[0].totalNodeHours}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Core Hours</th>
|
||||
<td>{$statsQuery.data.stats[0].totalCoreHours}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="col">Total Accelerator Hours</th>
|
||||
<td>{$statsQuery.data.stats[0].totalAccHours}</td>
|
||||
</tr>
|
||||
</Table>
|
||||
</Col>
|
||||
<Col>
|
||||
<div bind:clientWidth={colWidth1}>
|
||||
<h5>Top
|
||||
<select class="p-0" bind:value={groupSelection}>
|
||||
{#each groupOptions as option}
|
||||
<option value={option}>
|
||||
{option.key.charAt(0).toUpperCase() + option.key.slice(1)}s
|
||||
</option>
|
||||
{/each}
|
||||
</select>
|
||||
</h5>
|
||||
{#key $topQuery.data}
|
||||
{#if $topQuery.fetching}
|
||||
<Spinner/>
|
||||
{:else if $topQuery.error}
|
||||
<Card body color="danger">{$topQuery.error.message}</Card>
|
||||
{:else}
|
||||
<Pie
|
||||
size={colWidth1}
|
||||
sliceLabel={sortSelection.label}
|
||||
quantities={$topQuery.data.topList.map((t) => t[sortSelection.key])}
|
||||
entities={$topQuery.data.topList.map((t) => t.id)}
|
||||
/>
|
||||
{/if}
|
||||
{/key}
|
||||
</div>
|
||||
<div class="col-3">
|
||||
{#key $statsQuery.data.stats[0].histNumNodes}
|
||||
<h4>Number of Nodes Distribution</h4>
|
||||
<Histogram
|
||||
width={colWidth - 25}
|
||||
data={$statsQuery.data.stats[0].histNumNodes}
|
||||
xlabel="Allocated Nodes [#]"
|
||||
ylabel="Number of Jobs" />
|
||||
</div>
|
||||
</Col>
|
||||
<Col>
|
||||
{#key $topQuery.data}
|
||||
{#if $topQuery.fetching}
|
||||
<Spinner/>
|
||||
{:else if $topQuery.error}
|
||||
<Card body color="danger">{$topQuery.error.message}</Card>
|
||||
{:else}
|
||||
<Table>
|
||||
<tr class="mb-2">
|
||||
<th>Legend</th>
|
||||
<th>{groupSelection.label}</th>
|
||||
<th>
|
||||
<select class="p-0" bind:value={sortSelection}>
|
||||
{#each sortOptions as option}
|
||||
<option value={option}>
|
||||
{option.label}
|
||||
</option>
|
||||
{/each}
|
||||
</select>
|
||||
</th>
|
||||
</tr>
|
||||
{#each $topQuery.data.topList as te, i}
|
||||
<tr>
|
||||
<td><Icon name="circle-fill" style="color: {colors[i]};"/></td>
|
||||
{#if groupSelection.key == 'User'}
|
||||
<th scope="col"><a href="/monitoring/user/{te.id}?cluster={cluster.name}">{te.id}</a></th>
|
||||
{:else}
|
||||
<th scope="col"><a href="/monitoring/jobs/?cluster={cluster.name}&project={te.id}&projectMatch=eq">{te.id}</a></th>
|
||||
{/if}
|
||||
<td>{te[sortSelection.key]}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</Table>
|
||||
{/if}
|
||||
{/key}
|
||||
</div>
|
||||
<div class="col-3">
|
||||
</Col>
|
||||
</Row>
|
||||
<Row cols={3} class="mb-2">
|
||||
<Col>
|
||||
{#if $rooflineQuery.fetching}
|
||||
<Spinner />
|
||||
{:else if $rooflineQuery.error}
|
||||
<Card body color="danger">{$rooflineQuery.error.message}</Card>
|
||||
{:else if $rooflineQuery.data && cluster}
|
||||
<div bind:clientWidth={colWidth2}>
|
||||
{#key $rooflineQuery.data}
|
||||
<Roofline
|
||||
width={colWidth - 25}
|
||||
width={colWidth2} height={300}
|
||||
tiles={$rooflineQuery.data.rooflineHeatmap}
|
||||
cluster={cluster.subClusters.length == 1 ? cluster.subClusters[0] : null}
|
||||
maxY={rooflineMaxY} />
|
||||
{/key}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</Col>
|
||||
<Col>
|
||||
<div bind:clientWidth={colWidth3}>
|
||||
{#key $statsQuery.data.stats[0].histDuration}
|
||||
<Histogram
|
||||
width={colWidth3} height={300}
|
||||
data={convert2uplot($statsQuery.data.stats[0].histDuration)}
|
||||
title="Duration Distribution"
|
||||
xlabel="Current Runtimes"
|
||||
xunit="Hours"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"/>
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
|
||||
<Col>
|
||||
<div bind:clientWidth={colWidth4}>
|
||||
{#key $statsQuery.data.stats[0].histNumCores}
|
||||
<Histogram
|
||||
width={colWidth4} height={300}
|
||||
data={convert2uplot($statsQuery.data.stats[0].histNumCores)}
|
||||
title="Number of Cores Distribution"
|
||||
xlabel="Allocated Cores"
|
||||
xunit="Cores"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"/>
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
{/if}
|
||||
|
||||
<br/>
|
||||
<hr class="my-6"/>
|
||||
|
||||
{#if $footprintsQuery.error}
|
||||
<Row>
|
||||
<Col>
|
||||
@ -216,8 +367,9 @@
<Row>
<Col>
<Card body>
These histograms show the distribution of the averages of all jobs matching the filters. Each job/average is weighted by its node hours.
Note that some metrics could be disabled for specific subclusters as per metriConfig and thus could affect shown average values.
These histograms show the distribution of the averages of all jobs matching the filters. Each job/average is weighted by its node hours by default
(Accelerator hours for native accelerator scope metrics, coreHours for native core scope metrics).
Note that some metrics could be disabled for specific subclusters as per metricConfig and thus could affect shown average values.
</Card>
<br/>
</Col>
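A rough sketch of the weighting described in the card text above, assuming `binsFromFootprint` behaves roughly like this (the real implementation lives in `./utils.js` and is not shown in this diff); metric scopes and the binning formula are illustrative:

// Sketch: pick a time-based weight from the metric's native scope, then accumulate
// weighted hours per bin instead of plain job counts.
function pickWeights(timeWeights, scope) {
  if (scope == 'accelerator') return timeWeights.accHours
  if (scope == 'core') return timeWeights.coreHours
  return timeWeights.nodeHours // default, as described in the card text
}

function weightedBins(averages, timeWeights, scope, numBins) {
  const weights = pickWeights(timeWeights, scope)
  const max = Math.max(...averages) || 1
  const bins = Array.from({ length: numBins }, () => 0)
  averages.forEach((avg, jobIdx) => {
    if (avg == null) return // footprints are nullable per job
    const bin = Math.min(Math.floor((avg / max) * numBins), numBins - 1)
    bins[bin] += weights[jobIdx] // normalized hours, not a job count
  })
  return bins
}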
@ -229,19 +381,21 @@
let:width
renderFor="analysis"
items={metricsInHistograms.map(metric => ({ metric, ...binsFromFootprint(
$footprintsQuery.data.footprints.nodehours,
$footprintsQuery.data.footprints.timeWeights,
metricConfig(cluster.name, metric)?.scope,
$footprintsQuery.data.footprints.metrics.find(f => f.metric == metric).data, numBins) }))}
itemsPerRow={ccconfig.plot_view_plotsPerRow}>

<h4>Average Distribution of '{item.metric}'</h4>
<Histogram
data={convert2uplot(item.bins)}
width={width} height={250}
min={item.min} max={item.max}
data={item.bins}
label={item.label}
xlabel={`${item.metric} Average [${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
title="Average Distribution of '{item.metric}'"
xlabel={`${item.metric} bin maximum [${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}]`}
ylabel="Node Hours [h]" />
xunit={`${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}`}
ylabel="Normalized Hours"
yunit="Hours"/>
</PlotTable>
</Col>
</Row>
@ -250,7 +404,7 @@
<Col>
<Card body>
Each circle represents one job. The size of a circle is proportional to its node hours. Darker circles mean multiple jobs have the same averages for the respective metrics.
Note that some metrics could be disabled for specific subclusters as per metriConfig and thus could affect shown average values.
Note that some metrics could be disabled for specific subclusters as per metricConfig and thus could affect shown average values.
</Card>
<br/>
</Col>
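A small sketch of the circle sizing described above: the `S` series handed to the scatter plot below comes from the per-job node hours, and a plot can map it to radii so that drawn area is proportional to the hours (the radius formula here is illustrative, not the actual Scatter.svelte code):

// Sketch: map each job's node hours to a circle radius so that the drawn
// area, not the radius, scales with the node hours of that job.
function bubbleRadii(nodeHours, maxRadiusPx = 12) {
  const maxHours = Math.max(...nodeHours) || 1
  return nodeHours.map((h) => maxRadiusPx * Math.sqrt(h / maxHours))
}

// Example with hypothetical footprint data: three jobs, the second twice as "heavy".
console.log(bubbleRadii([10, 20, 5])) // -> [~8.5, 12, 6]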
@ -260,6 +414,7 @@
<PlotTable
let:item
let:width
renderFor="analysis"
items={metricsInScatterplots.map(([m1, m2]) => ({
m1, f1: $footprintsQuery.data.footprints.metrics.find(f => f.metric == m1).data,
m2, f2: $footprintsQuery.data.footprints.metrics.find(f => f.metric == m2).data }))}
@ -271,14 +426,14 @@
(metricConfig(cluster.name, item.m1)?.unit?.base ? metricConfig(cluster.name, item.m1)?.unit?.base : '')}]`}
yLabel={`${item.m2} [${(metricConfig(cluster.name, item.m2)?.unit?.prefix ? metricConfig(cluster.name, item.m2)?.unit?.prefix : '') +
(metricConfig(cluster.name, item.m2)?.unit?.base ? metricConfig(cluster.name, item.m2)?.unit?.base : '')}]`}
X={item.f1} Y={item.f2} S={$footprintsQuery.data.footprints.nodehours} />
X={item.f1} Y={item.f2} S={$footprintsQuery.data.footprints.timeWeights.nodeHours} />
</PlotTable>
</Col>
</Row>
{/if}

<style>
h4 {
h5 {
text-align: center;
}
</style>
|
@ -1,110 +1,178 @@
|
||||
<script>
|
||||
import { Icon, Button, InputGroup, Input, Collapse,
|
||||
Navbar, NavbarBrand, Nav, NavItem, NavLink, NavbarToggler,
|
||||
Dropdown, DropdownToggle, DropdownMenu, DropdownItem, InputGroupText } from 'sveltestrap'
|
||||
import {
|
||||
Icon,
|
||||
Collapse,
|
||||
Navbar,
|
||||
NavbarBrand,
|
||||
Nav,
|
||||
NavbarToggler,
|
||||
Dropdown,
|
||||
DropdownToggle,
|
||||
DropdownMenu,
|
||||
} from "sveltestrap";
|
||||
import NavbarLinks from "./NavbarLinks.svelte";
|
||||
import NavbarTools from "./NavbarTools.svelte";
|
||||
|
||||
export let username // empty string if auth. is disabled, otherwise the username as string
|
||||
export let authlevel // Integer
|
||||
export let clusters // array of names
|
||||
export let roles // Role Enum-Like
|
||||
export let username; // empty string if auth. is disabled, otherwise the username as string
|
||||
export let authlevel; // Integer
|
||||
export let clusters; // array of names
|
||||
export let roles; // Role Enum-Like
|
||||
|
||||
let isOpen = false
|
||||
let isOpen = false;
|
||||
let screenSize;
|
||||
|
||||
const userviews = [
|
||||
{ title: 'My Jobs', href: `/monitoring/user/${username}`, icon: 'bar-chart-line-fill' },
|
||||
{ title: `Job Search`, href: '/monitoring/jobs/', icon: 'card-list' },
|
||||
{ title: 'Tags', href: '/monitoring/tags/', icon: 'tags' }
|
||||
]
|
||||
const jobsTitle = new Map();
|
||||
jobsTitle.set(2, "Job Search");
|
||||
jobsTitle.set(3, "Managed Jobs");
|
||||
jobsTitle.set(4, "Jobs");
|
||||
jobsTitle.set(5, "Jobs");
|
||||
const usersTitle = new Map();
|
||||
usersTitle.set(3, "Managed Users");
|
||||
usersTitle.set(4, "Users");
|
||||
usersTitle.set(5, "Users");
|
||||
|
||||
const managerviews = [
|
||||
{ title: 'My Jobs', href: `/monitoring/user/${username}`, icon: 'bar-chart-line-fill' },
|
||||
{ title: `Managed Jobs`, href: '/monitoring/jobs/', icon: 'card-list' },
|
||||
{ title: `Managed Users`, href: '/monitoring/users/', icon: 'people-fill' },
|
||||
{ title: 'Tags', href: '/monitoring/tags/', icon: 'tags' }
|
||||
]
|
||||
|
||||
const supportviews = [
|
||||
{ title: 'My Jobs', href: `/monitoring/user/${username}`, icon: 'bar-chart-line-fill' },
|
||||
{ title: 'Jobs', href: '/monitoring/jobs/', icon: 'card-list' },
|
||||
{ title: 'Users', href: '/monitoring/users/', icon: 'people-fill' },
|
||||
{ title: 'Projects', href: '/monitoring/projects/', icon: 'folder' },
|
||||
{ title: 'Tags', href: '/monitoring/tags/', icon: 'tags' }
|
||||
]
|
||||
|
||||
const adminviews = [
|
||||
{ title: 'My Jobs', href: `/monitoring/user/${username}`, icon: 'bar-chart-line-fill' },
|
||||
{ title: 'Jobs', href: '/monitoring/jobs/', icon: 'card-list' },
|
||||
{ title: 'Users', href: '/monitoring/users/', icon: 'people-fill' },
|
||||
{ title: 'Projects', href: '/monitoring/projects/', icon: 'folder' },
|
||||
{ title: 'Tags', href: '/monitoring/tags/', icon: 'tags' }
|
||||
]
|
||||
|
||||
const viewsPerCluster = [
|
||||
{ title: 'Analysis', requiredRole: roles.support, href: '/monitoring/analysis/', icon: 'graph-up' },
|
||||
{ title: 'Systems', requiredRole: roles.admin, href: '/monitoring/systems/', icon: 'cpu' },
|
||||
{ title: 'Status', requiredRole: roles.admin, href: '/monitoring/status/', icon: 'cpu' },
|
||||
]
|
||||
const views = [
|
||||
{
|
||||
title: "My Jobs",
|
||||
requiredRole: roles.user,
|
||||
href: `/monitoring/user/${username}`,
|
||||
icon: "bar-chart-line-fill",
|
||||
perCluster: false,
|
||||
menu: "none",
|
||||
},
|
||||
{
|
||||
title: jobsTitle.get(authlevel),
|
||||
requiredRole: roles.user,
|
||||
href: `/monitoring/jobs/`,
|
||||
icon: "card-list",
|
||||
perCluster: false,
|
||||
menu: "none",
|
||||
},
|
||||
{
|
||||
title: usersTitle.get(authlevel),
|
||||
requiredRole: roles.manager,
|
||||
href: "/monitoring/users/",
|
||||
icon: "people-fill",
|
||||
perCluster: false,
|
||||
menu: "Groups",
|
||||
},
|
||||
{
|
||||
title: "Projects",
|
||||
requiredRole: roles.support,
|
||||
href: "/monitoring/projects/",
|
||||
icon: "folder",
|
||||
perCluster: false,
|
||||
menu: "Groups",
|
||||
},
|
||||
{
|
||||
title: "Tags",
|
||||
requiredRole: roles.user,
|
||||
href: "/monitoring/tags/",
|
||||
icon: "tags",
|
||||
perCluster: false,
|
||||
menu: "Groups",
|
||||
},
|
||||
{
|
||||
title: "Analysis",
|
||||
requiredRole: roles.support,
|
||||
href: "/monitoring/analysis/",
|
||||
icon: "graph-up",
|
||||
perCluster: true,
|
||||
menu: "Stats",
|
||||
},
|
||||
{
|
||||
title: "Nodes",
|
||||
requiredRole: roles.admin,
|
||||
href: "/monitoring/systems/",
|
||||
icon: "cpu",
|
||||
perCluster: true,
|
||||
menu: "Groups",
|
||||
},
|
||||
{
|
||||
title: "Status",
|
||||
requiredRole: roles.admin,
|
||||
href: "/monitoring/status/",
|
||||
icon: "cpu",
|
||||
perCluster: true,
|
||||
menu: "Stats",
|
||||
},
|
||||
];
|
||||
</script>
|
||||
|
||||
<Navbar color="light" light expand="lg" fixed="top">
|
||||
<svelte:window bind:innerWidth={screenSize} />
|
||||
<Navbar color="light" light expand="md" fixed="top">
|
||||
<NavbarBrand href="/">
|
||||
<img alt="ClusterCockpit Logo" src="/img/logo.png" height="25rem">
|
||||
<img alt="ClusterCockpit Logo" src="/img/logo.png" height="25rem" />
|
||||
</NavbarBrand>
|
||||
<NavbarToggler on:click={() => (isOpen = !isOpen)} />
|
||||
<Collapse {isOpen} navbar expand="lg" on:update={({ detail }) => (isOpen = detail.isOpen)}>
|
||||
<Nav pills>
|
||||
{#if authlevel == roles.admin}
|
||||
{#each adminviews as item}
|
||||
<NavLink href={item.href} active={window.location.pathname == item.href}><Icon name={item.icon}/> {item.title}</NavLink>
|
||||
{/each}
|
||||
{:else if authlevel == roles.support}
|
||||
{#each supportviews as item}
|
||||
<NavLink href={item.href} active={window.location.pathname == item.href}><Icon name={item.icon}/> {item.title}</NavLink>
|
||||
{/each}
|
||||
{:else if authlevel == roles.manager}
|
||||
{#each managerviews as item}
|
||||
<NavLink href={item.href} active={window.location.pathname == item.href}><Icon name={item.icon}/> {item.title}</NavLink>
|
||||
{/each}
|
||||
{:else} <!-- Compatibility: Handle "user role" or "no role" as identical-->
|
||||
{#each userviews as item}
|
||||
<NavLink href={item.href} active={window.location.pathname == item.href}><Icon name={item.icon}/> {item.title}</NavLink>
|
||||
{/each}
|
||||
{/if}
|
||||
{#each viewsPerCluster.filter(item => item.requiredRole <= authlevel) as item}
|
||||
<NavItem>
|
||||
<Dropdown nav inNavbar>
|
||||
<Collapse
|
||||
style="justify-content: space-between"
|
||||
{isOpen}
|
||||
navbar
|
||||
expand="md"
|
||||
on:update={({ detail }) => (isOpen = detail.isOpen)}
|
||||
>
|
||||
<Nav navbar>
|
||||
{#if screenSize > 1500 || screenSize < 768}
|
||||
<NavbarLinks
|
||||
{clusters}
|
||||
links={views.filter(
|
||||
(item) => item.requiredRole <= authlevel
|
||||
)}
|
||||
/>
|
||||
{:else if screenSize > 1300}
|
||||
<NavbarLinks
|
||||
{clusters}
|
||||
links={views.filter(
|
||||
(item) =>
|
||||
item.requiredRole <= authlevel &&
|
||||
item.menu != "Stats"
|
||||
)}
|
||||
/>
|
||||
<Dropdown nav>
|
||||
<DropdownToggle nav caret>
|
||||
<Icon name="graph-up" />
|
||||
Stats
|
||||
</DropdownToggle>
|
||||
<DropdownMenu class="dropdown-menu-lg-end">
|
||||
<NavbarLinks
|
||||
{clusters}
|
||||
links={views.filter(
|
||||
(item) =>
|
||||
item.requiredRole <= authlevel &&
|
||||
item.menu == "Stats"
|
||||
)}
|
||||
/>
|
||||
</DropdownMenu>
|
||||
</Dropdown>
|
||||
{:else}
|
||||
<NavbarLinks
|
||||
{clusters}
|
||||
links={views.filter(
|
||||
(item) =>
|
||||
item.requiredRole <= authlevel &&
|
||||
item.menu == "none"
|
||||
)}
|
||||
/>
|
||||
{#each Array("Groups", "Stats") as menu}
|
||||
<Dropdown nav>
|
||||
<DropdownToggle nav caret>
|
||||
<Icon name={item.icon}/> {item.title}
|
||||
{menu}
|
||||
</DropdownToggle>
|
||||
<DropdownMenu>
|
||||
{#each clusters as cluster}
|
||||
<DropdownItem href={item.href + cluster.name} active={window.location.pathname == item.href + cluster.name}>
|
||||
{cluster.name}
|
||||
</DropdownItem>
|
||||
{/each}
|
||||
<DropdownMenu class="dropdown-menu-lg-end">
|
||||
<NavbarLinks
|
||||
{clusters}
|
||||
links={views.filter(
|
||||
(item) =>
|
||||
item.requiredRole <= authlevel &&
|
||||
item.menu == menu
|
||||
)}
|
||||
/>
|
||||
</DropdownMenu>
|
||||
</Dropdown>
|
||||
</NavItem>
|
||||
{/each}
|
||||
{/each}
|
||||
{/if}
|
||||
</Nav>
|
||||
<NavbarTools {username} {authlevel} {roles} {screenSize} />
|
||||
</Collapse>
|
||||
<div class="d-flex">
|
||||
<form method="GET" action="/search">
|
||||
<InputGroup>
|
||||
<Input type="text" placeholder="Search 'type:<query>' ..." name="searchId"/>
|
||||
<Button outline type="submit"><Icon name="search"/></Button>
|
||||
<InputGroupText style="cursor:help;" title={(authlevel >= roles.support) ? "Example: 'projectId:a100cd', Types are: jobId | jobName | projectId | arrayJobId | username | name" : "Example: 'jobName:myjob', Types are jobId | jobName | projectId | arrayJobId "}><Icon name="info-circle"/></InputGroupText>
|
||||
</InputGroup>
|
||||
</form>
|
||||
{#if username}
|
||||
<form method="POST" action="/logout">
|
||||
<Button outline color="success" type="submit" style="margin-left: 10px;">
|
||||
<Icon name="box-arrow-right"/> Logout {username}
|
||||
</Button>
|
||||
</form>
|
||||
{/if}
|
||||
<Button outline on:click={() => window.location.href = '/config'} style="margin-left: 10px;">
|
||||
<Icon name="gear"/>
|
||||
</Button>
|
||||
</div>
|
||||
</Navbar>
|
||||
|
@ -20,12 +20,11 @@
|
||||
} from "sveltestrap";
|
||||
import PlotTable from "./PlotTable.svelte";
|
||||
import Metric from "./Metric.svelte";
|
||||
import PolarPlot from "./plots/Polar.svelte";
|
||||
import Polar from "./plots/Polar.svelte";
|
||||
import Roofline from "./plots/Roofline.svelte";
|
||||
import JobInfo from "./joblist/JobInfo.svelte";
|
||||
import TagManagement from "./TagManagement.svelte";
|
||||
import MetricSelection from "./MetricSelection.svelte";
|
||||
import Zoom from "./Zoom.svelte";
|
||||
import StatsTable from "./StatsTable.svelte";
|
||||
import { getContext } from "svelte";
|
||||
|
||||
@ -33,6 +32,9 @@
|
||||
export let authlevel;
|
||||
export let roles;
|
||||
|
||||
const accMetrics = ['acc_utilization', 'acc_mem_used', 'acc_power', 'nv_mem_util', 'nv_sm_clock', 'nv_temp'];
|
||||
let accNodeOnly
|
||||
|
||||
const { query: initq } = init(`
|
||||
job(id: "${dbid}") {
|
||||
id, jobId, user, project, cluster, startTime,
|
||||
@ -76,8 +78,7 @@
|
||||
]);
|
||||
|
||||
// Select default Scopes to load: Check before if accelerator metrics are not on accelerator scope by default
|
||||
const accMetrics = ['acc_utilization', 'acc_mem_used', 'acc_power', 'nv_mem_util', 'nv_sm_clock', 'nv_temp']
|
||||
const accNodeOnly = [...toFetch].some(function(m) {
|
||||
accNodeOnly = [...toFetch].some(function(m) {
|
||||
if (accMetrics.includes(m)) {
|
||||
const mc = metrics(job.cluster, m)
|
||||
return mc.scope !== 'accelerator'
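The `accNodeOnly` flag computed here answers one question: is any requested accelerator metric only available at node scope on this cluster? A reduced sketch of the same check, with a stubbed `metrics()` lookup standing in for the real cluster-config helper:

```js
// Stub for the cluster metric-config lookup (assumption: it returns an object with a scope field).
const metricConfigs = {
  acc_utilization: { scope: "accelerator" },
  nv_sm_clock:     { scope: "node" }, // only sampled per node on this hypothetical cluster
};
const metrics = (cluster, name) => metricConfigs[name];

const accMetrics = ['acc_utilization', 'acc_mem_used', 'acc_power', 'nv_mem_util', 'nv_sm_clock', 'nv_temp'];
const toFetch = new Set(["flops_any", "acc_utilization", "nv_sm_clock"]);

// True if at least one accelerator metric is not provided at accelerator scope.
const accNodeOnly = [...toFetch].some(
  (m) => accMetrics.includes(m) && metrics("cluster", m).scope !== 'accelerator'
);
console.log(accNodeOnly); // true here, because nv_sm_clock is node-scoped
```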
|
||||
@ -132,7 +133,6 @@
|
||||
jobTags,
|
||||
fullWidth,
|
||||
statsTable;
|
||||
$: polarPlotSize = Math.min(fullWidth / 3 - 10, 300);
|
||||
$: document.title = $initq.fetching
|
||||
? "Loading..."
|
||||
: $initq.error
|
||||
@ -244,9 +244,8 @@
|
||||
{/if}
|
||||
{/if}
|
||||
<Col>
|
||||
<PolarPlot
|
||||
width={polarPlotSize}
|
||||
height={polarPlotSize}
|
||||
<Polar
|
||||
size={fullWidth / 4.1}
|
||||
metrics={ccconfig[
|
||||
`job_view_polarPlotMetrics:${$initq.data.job.cluster}`
|
||||
] || ccconfig[`job_view_polarPlotMetrics`]}
|
||||
@ -257,7 +256,7 @@
|
||||
<Col>
|
||||
<Roofline
|
||||
width={fullWidth / 3 - 10}
|
||||
height={polarPlotSize}
|
||||
height={fullWidth / 5}
|
||||
cluster={clusters
|
||||
.find((c) => c.name == $initq.data.job.cluster)
|
||||
.subClusters.find(
|
||||
@ -290,9 +289,9 @@
|
||||
</Button>
|
||||
{/if}
|
||||
</Col>
|
||||
<Col xs="auto">
|
||||
<!-- <Col xs="auto">
|
||||
<Zoom timeseriesPlots={plots} />
|
||||
</Col>
|
||||
</Col> -->
|
||||
</Row>
|
||||
<br />
|
||||
<Row>
|
||||
@ -329,6 +328,7 @@
|
||||
scopes={item.data.map((x) => x.scope)}
|
||||
{width}
|
||||
isShared={$initq.data.job.exclusive != 1}
|
||||
resources={$initq.data.job.resources}
|
||||
/>
|
||||
{:else}
|
||||
<Card body color="warning"
|
||||
@ -396,6 +396,8 @@
|
||||
bind:this={statsTable}
|
||||
job={$initq.data.job}
|
||||
jobMetrics={$jobMetrics.data.jobMetrics}
|
||||
accMetrics={accMetrics}
|
||||
accNodeOnly={accNodeOnly}
|
||||
/>
|
||||
{/key}
|
||||
{/if}
|
||||
|
@ -89,6 +89,7 @@
|
||||
timestep={data.timestep}
|
||||
scope={selectedScope} metric={metricName}
|
||||
series={series}
|
||||
isShared={isShared} />
|
||||
isShared={isShared}
|
||||
resources={job.resources} />
|
||||
{/if}
|
||||
{/key}
|
||||
|
39
web/frontend/src/NavbarLinks.svelte
Normal file
@ -0,0 +1,39 @@
|
||||
<script>
|
||||
import {
|
||||
Icon,
|
||||
NavLink,
|
||||
Dropdown,
|
||||
DropdownToggle,
|
||||
DropdownMenu,
|
||||
DropdownItem,
|
||||
} from "sveltestrap";
|
||||
|
||||
export let clusters; // array of names
|
||||
export let links; // array of nav links
|
||||
</script>
|
||||
|
||||
{#each links as item}
|
||||
{#if !item.perCluster}
|
||||
<NavLink href={item.href} active={window.location.pathname == item.href}
|
||||
><Icon name={item.icon} /> {item.title}</NavLink
|
||||
>
|
||||
{:else}
|
||||
<Dropdown nav inNavbar>
|
||||
<DropdownToggle nav caret>
|
||||
<Icon name={item.icon} />
|
||||
{item.title}
|
||||
</DropdownToggle>
|
||||
<DropdownMenu class="dropdown-menu-lg-end">
|
||||
{#each clusters as cluster}
|
||||
<DropdownItem
|
||||
href={item.href + cluster.name}
|
||||
active={window.location.pathname ==
|
||||
item.href + cluster.name}
|
||||
>
|
||||
{cluster.name}
|
||||
</DropdownItem>
|
||||
{/each}
|
||||
</DropdownMenu>
|
||||
</Dropdown>
|
||||
{/if}
|
||||
{/each}
|
127
web/frontend/src/NavbarTools.svelte
Normal file
@ -0,0 +1,127 @@
|
||||
<script>
|
||||
import {
|
||||
Icon,
|
||||
Nav,
|
||||
NavItem,
|
||||
InputGroup,
|
||||
Input,
|
||||
Button,
|
||||
InputGroupText,
|
||||
Container,
|
||||
Row,
|
||||
Col,
|
||||
} from "sveltestrap";
|
||||
|
||||
export let username; // empty string if auth. is disabled, otherwise the username as string
|
||||
export let authlevel; // Integer
|
||||
export let roles; // Role Enum-Like
|
||||
export let screenSize; // current viewport width in px, used for the responsive layout
|
||||
</script>
|
||||
|
||||
<Nav navbar>
|
||||
{#if screenSize >= 768}
|
||||
<NavItem>
|
||||
<form method="GET" action="/search">
|
||||
<InputGroup>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Search 'type:<query>' ..."
|
||||
name="searchId"
|
||||
style="margin-left: 10px;"
|
||||
/>
|
||||
<!-- bootstrap classes w/o effect -->
|
||||
<Button outline type="submit"><Icon name="search" /></Button
|
||||
>
|
||||
<InputGroupText
|
||||
style="cursor:help;"
|
||||
title={authlevel >= roles.support
|
||||
? "Example: 'projectId:a100cd', Types are: jobId | jobName | projectId | arrayJobId | username | name"
|
||||
: "Example: 'jobName:myjob', Types are jobId | jobName | projectId | arrayJobId "}
|
||||
><Icon name="info-circle" /></InputGroupText
|
||||
>
|
||||
</InputGroup>
|
||||
</form>
|
||||
</NavItem>
|
||||
{#if username}
|
||||
<NavItem>
|
||||
<form method="POST" action="/logout">
|
||||
<Button
|
||||
outline
|
||||
color="success"
|
||||
type="submit"
|
||||
style="margin-left: 10px;"
|
||||
>
|
||||
{#if screenSize > 1630}
|
||||
<Icon name="box-arrow-right" /> Logout {username}
|
||||
{:else}
|
||||
<Icon name="box-arrow-right" />
|
||||
{/if}
|
||||
</Button>
|
||||
</form>
|
||||
</NavItem>
|
||||
{/if}
|
||||
<NavItem>
|
||||
<Button
|
||||
outline
|
||||
on:click={() => (window.location.href = "/config")}
|
||||
style="margin-left: 10px;"
|
||||
>
|
||||
<Icon name="gear" />
|
||||
</Button>
|
||||
</NavItem>
|
||||
{:else}
|
||||
<NavItem>
|
||||
<Container>
|
||||
<Row cols={2}>
|
||||
<Col xs="6">
|
||||
<form method="POST" action="/logout">
|
||||
<Button
|
||||
outline
|
||||
color="success"
|
||||
type="submit"
|
||||
size="sm"
|
||||
class="my-2 w-100"
|
||||
>
|
||||
<Icon name="box-arrow-right" /> Logout {username}
|
||||
</Button>
|
||||
</form>
|
||||
</Col>
|
||||
<Col xs="6">
|
||||
<Button
|
||||
outline
|
||||
on:click={() => (window.location.href = "/config")}
|
||||
size="sm"
|
||||
class="my-2 w-100"
|
||||
>
|
||||
{#if authlevel >= roles.admin}
|
||||
<Icon name="gear" /> Admin Settings
|
||||
{:else}
|
||||
<Icon name="gear" /> Plotting Options
|
||||
{/if}
|
||||
</Button>
|
||||
</Col>
|
||||
</Row>
|
||||
</Container>
|
||||
</NavItem>
|
||||
<NavItem style="margin-left: 10px; margin-right:10px;">
|
||||
<form method="GET" action="/search">
|
||||
<InputGroup>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Search 'type:<query>' ..."
|
||||
name="searchId"
|
||||
/>
|
||||
<Button outline type="submit"><Icon name="search" /></Button
|
||||
>
|
||||
<InputGroupText
|
||||
style="cursor:help;"
|
||||
title={authlevel >= roles.support
|
||||
? "Example: 'projectId:a100cd', Types are: jobId | jobName | projectId | arrayJobId | username | name"
|
||||
: "Example: 'jobName:myjob', Types are jobId | jobName | projectId | arrayJobId "}
|
||||
><Icon name="info-circle" /></InputGroupText
|
||||
>
|
||||
</InputGroup>
|
||||
</form>
|
||||
</NavItem>
|
||||
{/if}
|
||||
</Nav>
|
@ -11,6 +11,7 @@
|
||||
} from "sveltestrap";
|
||||
import { queryStore, gql, getContextClient } from "@urql/svelte";
|
||||
import TimeSelection from "./filters/TimeSelection.svelte";
|
||||
import Refresher from './joblist/Refresher.svelte';
|
||||
import PlotTable from "./PlotTable.svelte";
|
||||
import MetricPlot from "./plots/MetricPlot.svelte";
|
||||
import { getContext } from "svelte";
|
||||
@ -160,6 +161,13 @@
|
||||
No currently running jobs.
|
||||
{/if}
|
||||
</Col>
|
||||
<Col>
|
||||
<Refresher on:reload={() => {
|
||||
const diff = Date.now() - to
|
||||
from = new Date(from.getTime() + diff)
|
||||
to = new Date(to.getTime() + diff)
|
||||
}} />
|
||||
</Col>
|
||||
<Col>
|
||||
<TimeSelection bind:from bind:to />
|
||||
</Col>
|
||||
@ -203,6 +211,8 @@
|
||||
subCluster={$nodeMetricsData.data.nodeMetrics[0]
|
||||
.subCluster}
|
||||
series={item.metric.series}
|
||||
resources={[{hostname: hostname}]}
|
||||
forNode={true}
|
||||
/>
|
||||
{:else if item.disabled === true && item.metric}
|
||||
<Card
|
||||
|
@ -7,6 +7,8 @@
|
||||
|
||||
export let job
|
||||
export let jobMetrics
|
||||
export let accMetrics
|
||||
export let accNodeOnly
|
||||
|
||||
const allMetrics = [...new Set(jobMetrics.map(m => m.name))].sort(),
|
||||
scopesForMetric = (metric) => jobMetrics
|
||||
@ -19,9 +21,19 @@
|
||||
isMetricSelectionOpen = false,
|
||||
selectedMetrics = getContext('cc-config')[`job_view_nodestats_selectedMetrics:${job.cluster}`]
|
||||
|| getContext('cc-config')['job_view_nodestats_selectedMetrics']
|
||||
|
||||
|
||||
for (let metric of allMetrics) {
|
||||
selectedScopes[metric] = maxScope(scopesForMetric(metric))
|
||||
// Not Exclusive or Single Node: Get maxScope()
|
||||
// No Accelerators in Job and not Acc-Metric: Use 'core'
|
||||
// Accelerator Metric available on accelerator scope: Use 'accelerator'
|
||||
// Accelerator Metric only on node scope: Fallback to 'node'
|
||||
selectedScopes[metric] = (job.exclusive != 1 || job.numNodes == 1) ?
|
||||
(job.numAccs != 0 && accMetrics.includes(metric)) ?
|
||||
accNodeOnly ?
|
||||
'node'
|
||||
: 'accelerator'
|
||||
: 'core'
|
||||
: maxScope(scopesForMetric(metric))
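The nested ternary above encodes the four commented cases. Written as a plain function the decision may be easier to follow; the job object below is illustrative and `maxScopeForMetric` stands in for `maxScope(scopesForMetric(metric))`:

```js
// Sketch of the default-scope decision for the statistics table.
function defaultScope(job, metric, accMetrics, accNodeOnly, maxScopeForMetric) {
  // Exclusive multi-node job: keep the coarsest available scope.
  if (job.exclusive == 1 && job.numNodes != 1) return maxScopeForMetric(metric);
  // Shared or single-node job with accelerators and an accelerator metric:
  if (job.numAccs != 0 && accMetrics.includes(metric)) {
    return accNodeOnly ? 'node' : 'accelerator';
  }
  // Otherwise fall back to core granularity.
  return 'core';
}

const job = { exclusive: 0, numNodes: 1, numAccs: 4 }; // illustrative
console.log(defaultScope(job, 'acc_utilization', ['acc_utilization'], false, () => 'node')); // "accelerator"
```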
|
||||
sorting[metric] = {
|
||||
min: { dir: 'up', active: false },
|
||||
avg: { dir: 'up', active: false },
|
||||
|
@ -1,12 +1,49 @@
|
||||
<script>
|
||||
import { Icon } from 'sveltestrap'
|
||||
|
||||
export let host
|
||||
export let metric
|
||||
export let scope
|
||||
export let jobMetrics
|
||||
|
||||
function compareNumbers(a, b) {
|
||||
return a.id - b.id;
|
||||
}
|
||||
|
||||
function sortByField(field) {
|
||||
let s = sorting[field]
|
||||
if (s.active) {
|
||||
s.dir = s.dir == 'up' ? 'down' : 'up'
|
||||
} else {
|
||||
for (let field in sorting)
|
||||
sorting[field].active = false
|
||||
s.active = true
|
||||
}
|
||||
|
||||
sorting = {...sorting}
|
||||
series = series.sort((a, b) => {
|
||||
if (a == null || b == null)
|
||||
return -1
|
||||
|
||||
if (field === 'id') {
|
||||
return s.dir != 'up' ? a[field] - b[field] : b[field] - a[field]
|
||||
} else {
|
||||
return s.dir != 'up' ? a.statistics[field] - b.statistics[field] : b.statistics[field] - a.statistics[field]
|
||||
}
|
||||
})
|
||||
}
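The header-click handler above flips the direction of the already active column and switches the active column otherwise. A minimal standalone version of just that state transition:

```js
// Minimal sketch of the toggle used by sortByField(); only the state handling, no table re-sort.
let sorting = {
  id:  { dir: 'down', active: true  },
  min: { dir: 'up',   active: false },
  avg: { dir: 'up',   active: false },
  max: { dir: 'up',   active: false },
};

function toggle(field) {
  const s = sorting[field];
  if (s.active) {
    s.dir = s.dir === 'up' ? 'down' : 'up';      // same column: flip direction
  } else {
    for (const f in sorting) sorting[f].active = false;
    s.active = true;                             // new column: activate, keep direction
  }
  return sorting;
}

console.log(toggle('avg').avg); // { dir: 'up', active: true }
console.log(toggle('avg').avg); // { dir: 'down', active: true }
```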
|
||||
|
||||
let sorting = {
|
||||
id: { dir: 'down', active: true },
|
||||
min: { dir: 'up', active: false },
|
||||
avg: { dir: 'up', active: false },
|
||||
max: { dir: 'up', active: false },
|
||||
}
|
||||
|
||||
$: series = jobMetrics
|
||||
.find(jm => jm.name == metric && jm.scope == scope)
|
||||
?.metric.series.filter(s => s.hostname == host && s.statistics != null)
|
||||
?.sort(compareNumbers)
|
||||
</script>
|
||||
|
||||
{#if series == null || series.length == 0}
|
||||
@ -24,6 +61,14 @@
|
||||
{:else}
|
||||
<td colspan="4">
|
||||
<table style="width: 100%;">
|
||||
<tr>
|
||||
{#each ['id', 'min', 'avg', 'max'] as field}
|
||||
<th on:click={() => sortByField(field)}>
|
||||
Sort
|
||||
<Icon name="caret-{sorting[field].dir}{sorting[field].active ? '-fill' : ''}" />
|
||||
</th>
|
||||
{/each}
|
||||
</tr>
|
||||
{#each series as s, i}
|
||||
<tr>
|
||||
<th>{s.id ?? i}</th>
|
||||
|
@ -1,81 +1,319 @@
|
||||
<script>
|
||||
import Refresher from './joblist/Refresher.svelte'
|
||||
import Roofline, { transformPerNodeData } from './plots/Roofline.svelte'
|
||||
import Histogram from './plots/Histogram.svelte'
|
||||
import { Row, Col, Spinner, Card, CardHeader, CardTitle, CardBody, Table, Progress, Icon } from 'sveltestrap'
|
||||
import { init } from './utils.js'
|
||||
import { scaleNumbers } from './units.js'
|
||||
import { queryStore, gql, getContextClient } from '@urql/svelte'
|
||||
import { getContext } from "svelte";
|
||||
import Refresher from "./joblist/Refresher.svelte";
|
||||
import Roofline, { transformPerNodeData } from "./plots/Roofline.svelte";
|
||||
import Pie, { colors } from "./plots/Pie.svelte";
|
||||
import Histogram from "./plots/Histogram.svelte";
|
||||
import {
|
||||
Row,
|
||||
Col,
|
||||
Spinner,
|
||||
Card,
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
CardBody,
|
||||
Table,
|
||||
Progress,
|
||||
Icon,
|
||||
} from "sveltestrap";
|
||||
import { init, convert2uplot } from "./utils.js";
|
||||
import { scaleNumbers } from "./units.js";
|
||||
import {
|
||||
queryStore,
|
||||
gql,
|
||||
getContextClient,
|
||||
mutationStore,
|
||||
} from "@urql/svelte";
|
||||
|
||||
const { query: initq } = init()
|
||||
const { query: initq } = init();
|
||||
const ccconfig = getContext("cc-config");
|
||||
|
||||
export let cluster
|
||||
export let cluster;
|
||||
|
||||
let plotWidths = [], colWidth1 = 0, colWidth2
|
||||
let from = new Date(Date.now() - 5 * 60 * 1000), to = new Date(Date.now())
|
||||
let plotWidths = [],
|
||||
colWidth1 = 0,
|
||||
colWidth2;
|
||||
let from = new Date(Date.now() - 5 * 60 * 1000),
|
||||
to = new Date(Date.now());
|
||||
const topOptions = [
|
||||
{ key: "totalJobs", label: "Jobs" },
|
||||
{ key: "totalNodes", label: "Nodes" },
|
||||
{ key: "totalCores", label: "Cores" },
|
||||
{ key: "totalAccs", label: "Accelerators" },
|
||||
];
|
||||
|
||||
let topProjectSelection =
|
||||
topOptions.find(
|
||||
(option) =>
|
||||
option.key ==
|
||||
ccconfig[`status_view_selectedTopProjectCategory:${cluster}`]
|
||||
) ||
|
||||
topOptions.find(
|
||||
(option) =>
|
||||
option.key == ccconfig.status_view_selectedTopProjectCategory
|
||||
);
|
||||
let topUserSelection =
|
||||
topOptions.find(
|
||||
(option) =>
|
||||
option.key ==
|
||||
ccconfig[`status_view_selectedTopUserCategory:${cluster}`]
|
||||
) ||
|
||||
topOptions.find(
|
||||
(option) =>
|
||||
option.key == ccconfig.status_view_selectedTopUserCategory
|
||||
);
|
||||
|
||||
const client = getContextClient();
|
||||
$: mainQuery = queryStore({
|
||||
client: client,
|
||||
query: gql`query($cluster: String!, $filter: [JobFilter!]!, $metrics: [String!], $from: Time!, $to: Time!) {
|
||||
nodeMetrics(cluster: $cluster, metrics: $metrics, from: $from, to: $to) {
|
||||
host
|
||||
subCluster
|
||||
metrics {
|
||||
name
|
||||
scope
|
||||
metric {
|
||||
timestep
|
||||
unit { base, prefix }
|
||||
series { data }
|
||||
query: gql`
|
||||
query (
|
||||
$cluster: String!
|
||||
$filter: [JobFilter!]!
|
||||
$metrics: [String!]
|
||||
$from: Time!
|
||||
$to: Time!
|
||||
) {
|
||||
nodeMetrics(
|
||||
cluster: $cluster
|
||||
metrics: $metrics
|
||||
from: $from
|
||||
to: $to
|
||||
) {
|
||||
host
|
||||
subCluster
|
||||
metrics {
|
||||
name
|
||||
scope
|
||||
metric {
|
||||
timestep
|
||||
unit {
|
||||
base
|
||||
prefix
|
||||
}
|
||||
series {
|
||||
data
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stats: jobsStatistics(filter: $filter) {
|
||||
histDuration {
|
||||
count
|
||||
value
|
||||
}
|
||||
histNumNodes {
|
||||
count
|
||||
value
|
||||
}
|
||||
histNumCores {
|
||||
count
|
||||
value
|
||||
}
|
||||
histNumAccs {
|
||||
count
|
||||
value
|
||||
}
|
||||
}
|
||||
|
||||
allocatedNodes(cluster: $cluster) {
|
||||
name
|
||||
count
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
cluster: cluster,
|
||||
metrics: ["flops_any", "mem_bw"],
|
||||
from: from.toISOString(),
|
||||
to: to.toISOString(),
|
||||
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
|
||||
},
|
||||
});
|
||||
|
||||
stats: jobsStatistics(filter: $filter) {
|
||||
histDuration { count, value }
|
||||
histNumNodes { count, value }
|
||||
}
|
||||
const paging = { itemsPerPage: 10, page: 1 }; // Top 10
|
||||
$: topUserQuery = queryStore({
|
||||
client: client,
|
||||
query: gql`
|
||||
query (
|
||||
$filter: [JobFilter!]!
|
||||
$paging: PageRequest!
|
||||
$sortBy: SortByAggregate!
|
||||
) {
|
||||
topUser: jobsStatistics(
|
||||
filter: $filter
|
||||
page: $paging
|
||||
sortBy: $sortBy
|
||||
groupBy: USER
|
||||
) {
|
||||
id
|
||||
totalJobs
|
||||
totalNodes
|
||||
totalCores
|
||||
totalAccs
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
|
||||
paging,
|
||||
sortBy: topUserSelection.key.toUpperCase(),
|
||||
},
|
||||
});
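The Top Users and Top Projects panels now come from `jobsStatistics` with `groupBy`, `sortBy` and a fixed top-10 page instead of the earlier `jobsCount` query. A sketch of how such a result maps onto the `quantities`/`entities` props the Pie component expects further below; the sample values are invented, only the shape follows the query above:

```js
// Invented sample response matching the topUser query above.
const topUser = [
  { id: "alice", totalJobs: 42, totalNodes: 120, totalCores: 4800, totalAccs: 0 },
  { id: "bob",   totalJobs: 17, totalNodes:  64, totalCores: 2560, totalAccs: 8 },
];
const topUserSelection = { key: "totalNodes", label: "Nodes" };

// Same mapping as the Pie bindings in the markup below.
const quantities = topUser.map((tu) => tu[topUserSelection.key]); // [120, 64]
const entities   = topUser.map((tu) => tu.id);                    // ["alice", "bob"]
console.log(quantities, entities);
```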
|
||||
|
||||
allocatedNodes(cluster: $cluster) { name, count }
|
||||
topUsers: jobsCount(filter: $filter, groupBy: USER, weight: NODE_COUNT, limit: 10) { name, count }
|
||||
topProjects: jobsCount(filter: $filter, groupBy: PROJECT, weight: NODE_COUNT, limit: 10) { name, count }
|
||||
}`,
|
||||
variables: {
|
||||
cluster: cluster, metrics: ['flops_any', 'mem_bw'], from: from.toISOString(), to: to.toISOString(),
|
||||
filter: [{ state: ['running'] }, { cluster: { eq: cluster } }]
|
||||
}
|
||||
})
|
||||
$: topProjectQuery = queryStore({
|
||||
client: client,
|
||||
query: gql`
|
||||
query (
|
||||
$filter: [JobFilter!]!
|
||||
$paging: PageRequest!
|
||||
$sortBy: SortByAggregate!
|
||||
) {
|
||||
topProjects: jobsStatistics(
|
||||
filter: $filter
|
||||
page: $paging
|
||||
sortBy: $sortBy
|
||||
groupBy: PROJECT
|
||||
) {
|
||||
id
|
||||
totalJobs
|
||||
totalNodes
|
||||
totalCores
|
||||
totalAccs
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
|
||||
paging,
|
||||
sortBy: topProjectSelection.key.toUpperCase(),
|
||||
},
|
||||
});
|
||||
|
||||
const sumUp = (data, subcluster, metric) => data.reduce((sum, node) => node.subCluster == subcluster
|
||||
? sum + (node.metrics.find(m => m.name == metric)?.metric.series.reduce((sum, series) => sum + series.data[series.data.length - 1], 0) || 0)
|
||||
: sum, 0)
|
||||
const sumUp = (data, subcluster, metric) =>
|
||||
data.reduce(
|
||||
(sum, node) =>
|
||||
node.subCluster == subcluster
|
||||
? sum +
|
||||
(node.metrics
|
||||
.find((m) => m.name == metric)
|
||||
?.metric.series.reduce(
|
||||
(sum, series) =>
|
||||
sum + series.data[series.data.length - 1],
|
||||
0
|
||||
) || 0)
|
||||
: sum,
|
||||
0
|
||||
);
|
||||
|
||||
let allocatedNodes = {}, flopRate = {}, flopRateUnitPrefix = {}, flopRateUnitBase = {}, memBwRate = {}, memBwRateUnitPrefix = {}, memBwRateUnitBase = {}
|
||||
let allocatedNodes = {},
|
||||
flopRate = {},
|
||||
flopRateUnitPrefix = {},
|
||||
flopRateUnitBase = {},
|
||||
memBwRate = {},
|
||||
memBwRateUnitPrefix = {},
|
||||
memBwRateUnitBase = {};
|
||||
$: if ($initq.data && $mainQuery.data) {
|
||||
let subClusters = $initq.data.clusters.find(c => c.name == cluster).subClusters
|
||||
let subClusters = $initq.data.clusters.find(
|
||||
(c) => c.name == cluster
|
||||
).subClusters;
|
||||
for (let subCluster of subClusters) {
|
||||
allocatedNodes[subCluster.name] = $mainQuery.data.allocatedNodes.find(({ name }) => name == subCluster.name)?.count || 0
|
||||
flopRate[subCluster.name] = Math.floor(sumUp($mainQuery.data.nodeMetrics, subCluster.name, 'flops_any') * 100) / 100
|
||||
flopRateUnitPrefix[subCluster.name] = subCluster.flopRateSimd.unit.prefix
|
||||
flopRateUnitBase[subCluster.name] = subCluster.flopRateSimd.unit.base
|
||||
memBwRate[subCluster.name] = Math.floor(sumUp($mainQuery.data.nodeMetrics, subCluster.name, 'mem_bw') * 100) / 100
|
||||
memBwRateUnitPrefix[subCluster.name] = subCluster.memoryBandwidth.unit.prefix
|
||||
memBwRateUnitBase[subCluster.name] = subCluster.memoryBandwidth.unit.base
|
||||
allocatedNodes[subCluster.name] =
|
||||
$mainQuery.data.allocatedNodes.find(
|
||||
({ name }) => name == subCluster.name
|
||||
)?.count || 0;
|
||||
flopRate[subCluster.name] =
|
||||
Math.floor(
|
||||
sumUp(
|
||||
$mainQuery.data.nodeMetrics,
|
||||
subCluster.name,
|
||||
"flops_any"
|
||||
) * 100
|
||||
) / 100;
|
||||
flopRateUnitPrefix[subCluster.name] =
|
||||
subCluster.flopRateSimd.unit.prefix;
|
||||
flopRateUnitBase[subCluster.name] =
|
||||
subCluster.flopRateSimd.unit.base;
|
||||
memBwRate[subCluster.name] =
|
||||
Math.floor(
|
||||
sumUp(
|
||||
$mainQuery.data.nodeMetrics,
|
||||
subCluster.name,
|
||||
"mem_bw"
|
||||
) * 100
|
||||
) / 100;
|
||||
memBwRateUnitPrefix[subCluster.name] =
|
||||
subCluster.memoryBandwidth.unit.prefix;
|
||||
memBwRateUnitBase[subCluster.name] =
|
||||
subCluster.memoryBandwidth.unit.base;
|
||||
}
|
||||
}
|
||||
|
||||
const updateConfigurationMutation = ({ name, value }) => {
|
||||
return mutationStore({
|
||||
client: client,
|
||||
query: gql`
|
||||
mutation ($name: String!, $value: String!) {
|
||||
updateConfiguration(name: $name, value: $value)
|
||||
}
|
||||
`,
|
||||
variables: { name, value },
|
||||
});
|
||||
};
|
||||
|
||||
function updateTopUserConfiguration(select) {
|
||||
if (
|
||||
ccconfig[`status_view_selectedTopUserCategory:${cluster}`] != select
|
||||
) {
|
||||
updateConfigurationMutation({
|
||||
name: `status_view_selectedTopUserCategory:${cluster}`,
|
||||
value: JSON.stringify(select),
|
||||
}).subscribe((res) => {
|
||||
if (res.fetching === false && !res.error) {
|
||||
// console.log(`status_view_selectedTopUserCategory:${cluster}` + ' -> Updated!')
|
||||
} else if (res.fetching === false && res.error) {
|
||||
throw res.error;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// console.log('No Mutation Required: Top User')
|
||||
}
|
||||
}
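Selections are persisted per cluster through the `updateConfiguration` mutation, keyed as `status_view_selectedTopUserCategory:<cluster>` with a plain key as global fallback. A small helper illustrating that key/fallback convention (the `ccconfig` contents and cluster names are made up):

```js
// Illustrative per-cluster config lookup with global fallback, as used above.
const ccconfig = {
  "status_view_selectedTopUserCategory": "totalJobs",
  "status_view_selectedTopUserCategory:fritz": "totalNodes",
};

function selectedCategory(cluster) {
  return (
    ccconfig[`status_view_selectedTopUserCategory:${cluster}`] ||
    ccconfig["status_view_selectedTopUserCategory"]
  );
}

console.log(selectedCategory("fritz")); // "totalNodes"
console.log(selectedCategory("alex"));  // falls back to "totalJobs"
```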
|
||||
|
||||
function updateTopProjectConfiguration(select) {
|
||||
if (
|
||||
ccconfig[`status_view_selectedTopProjectCategory:${cluster}`] !=
|
||||
select
|
||||
) {
|
||||
updateConfigurationMutation({
|
||||
name: `status_view_selectedTopProjectCategory:${cluster}`,
|
||||
value: JSON.stringify(select),
|
||||
}).subscribe((res) => {
|
||||
if (res.fetching === false && !res.error) {
|
||||
// console.log(`status_view_selectedTopProjectCategory:${cluster}` + ' -> Updated!')
|
||||
} else if (res.fetching === false && res.error) {
|
||||
throw res.error;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// console.log('No Mutation Required: Top Project')
|
||||
}
|
||||
}
|
||||
|
||||
$: updateTopUserConfiguration(topUserSelection.key);
|
||||
$: updateTopProjectConfiguration(topProjectSelection.key);
|
||||
</script>
|
||||
|
||||
<!-- Loading indicator & Refresh -->
|
||||
|
||||
<Row>
|
||||
<Col xs="auto" style="align-self: flex-end;">
|
||||
<h4 class="mb-0" >Current utilization of cluster "{cluster}"</h4>
|
||||
<h4 class="mb-0">Current utilization of cluster "{cluster}"</h4>
|
||||
</Col>
|
||||
<Col xs="auto">
|
||||
{#if $initq.fetching || $mainQuery.fetching}
|
||||
<Spinner/>
|
||||
<Spinner />
|
||||
{:else if $initq.error}
|
||||
<Card body color="danger">{$initq.error.message}</Card>
|
||||
{:else}
|
||||
@ -83,10 +321,13 @@
|
||||
{/if}
|
||||
</Col>
|
||||
<Col xs="auto" style="margin-left: auto;">
|
||||
<Refresher initially={120} on:reload={() => {
|
||||
from = new Date(Date.now() - 5 * 60 * 1000)
|
||||
to = new Date(Date.now())
|
||||
}} />
|
||||
<Refresher
|
||||
initially={120}
|
||||
on:reload={() => {
|
||||
from = new Date(Date.now() - 5 * 60 * 1000);
|
||||
to = new Date(Date.now());
|
||||
}}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
{#if $mainQuery.error}
|
||||
@ -97,43 +338,85 @@
|
||||
</Row>
|
||||
{/if}
|
||||
|
||||
<hr>
|
||||
<hr />
|
||||
|
||||
<!-- Gauges & Roofline per Subcluster-->
|
||||
|
||||
{#if $initq.data && $mainQuery.data}
|
||||
{#each $initq.data.clusters.find(c => c.name == cluster).subClusters as subCluster, i}
|
||||
<Row cols={2} class="mb-3 justify-content-center">
|
||||
<Col xs="4" class="px-3">
|
||||
{#each $initq.data.clusters.find((c) => c.name == cluster).subClusters as subCluster, i}
|
||||
<Row class="mb-3 justify-content-center">
|
||||
<Col md="4" class="px-3">
|
||||
<Card class="h-auto mt-1">
|
||||
<CardHeader>
|
||||
<CardTitle class="mb-0">SubCluster "{subCluster.name}"</CardTitle>
|
||||
<CardTitle class="mb-0"
|
||||
>SubCluster "{subCluster.name}"</CardTitle
|
||||
>
|
||||
</CardHeader>
|
||||
<CardBody>
|
||||
<Table borderless>
|
||||
<tr class="py-2">
|
||||
<th scope="col">Allocated Nodes</th>
|
||||
<td style="min-width: 100px;"><div class="col"><Progress value={allocatedNodes[subCluster.name]} max={subCluster.numberOfNodes}/></div></td>
|
||||
<td>{allocatedNodes[subCluster.name]} / {subCluster.numberOfNodes} Nodes</td>
|
||||
<td style="min-width: 100px;"
|
||||
><div class="col">
|
||||
<Progress
|
||||
value={allocatedNodes[
|
||||
subCluster.name
|
||||
]}
|
||||
max={subCluster.numberOfNodes}
|
||||
/>
|
||||
</div></td
|
||||
>
|
||||
<td
|
||||
>{allocatedNodes[subCluster.name]} / {subCluster.numberOfNodes}
|
||||
Nodes</td
|
||||
>
|
||||
</tr>
|
||||
<tr class="py-2">
|
||||
<th scope="col">Flop Rate (Any) <Icon name="info-circle" class="p-1" style="cursor: help;" title="Flops[Any] = (Flops[Double] x 2) + Flops[Single]"/></th>
|
||||
<td style="min-width: 100px;"><div class="col"><Progress value={flopRate[subCluster.name]} max={subCluster.flopRateSimd.value * subCluster.numberOfNodes}/></div></td>
|
||||
<th scope="col"
|
||||
>Flop Rate (Any) <Icon
|
||||
name="info-circle"
|
||||
class="p-1"
|
||||
style="cursor: help;"
|
||||
title="Flops[Any] = (Flops[Double] x 2) + Flops[Single]"
|
||||
/></th
|
||||
>
|
||||
<td style="min-width: 100px;"
|
||||
><div class="col">
|
||||
<Progress
|
||||
value={flopRate[subCluster.name]}
|
||||
max={subCluster.flopRateSimd.value *
|
||||
subCluster.numberOfNodes}
|
||||
/>
|
||||
</div></td
|
||||
>
|
||||
<td>
|
||||
{scaleNumbers(flopRate[subCluster.name],
|
||||
(subCluster.flopRateSimd.value * subCluster.numberOfNodes),
|
||||
flopRateUnitPrefix[subCluster.name])
|
||||
}{flopRateUnitBase[subCluster.name]} [Max]
|
||||
{scaleNumbers(
|
||||
flopRate[subCluster.name],
|
||||
subCluster.flopRateSimd.value *
|
||||
subCluster.numberOfNodes,
|
||||
flopRateUnitPrefix[subCluster.name]
|
||||
)}{flopRateUnitBase[subCluster.name]} [Max]
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="py-2">
|
||||
<th scope="col">MemBw Rate</th>
|
||||
<td style="min-width: 100px;"><div class="col"><Progress value={memBwRate[subCluster.name]} max={subCluster.memoryBandwidth.value * subCluster.numberOfNodes}/></div></td>
|
||||
<td style="min-width: 100px;"
|
||||
><div class="col">
|
||||
<Progress
|
||||
value={memBwRate[subCluster.name]}
|
||||
max={subCluster.memoryBandwidth
|
||||
.value *
|
||||
subCluster.numberOfNodes}
|
||||
/>
|
||||
</div></td
|
||||
>
|
||||
<td>
|
||||
{scaleNumbers(memBwRate[subCluster.name],
|
||||
(subCluster.memoryBandwidth.value * subCluster.numberOfNodes),
|
||||
memBwRateUnitPrefix[subCluster.name])
|
||||
}{memBwRateUnitBase[subCluster.name]} [Max]
|
||||
{scaleNumbers(
|
||||
memBwRate[subCluster.name],
|
||||
subCluster.memoryBandwidth.value *
|
||||
subCluster.numberOfNodes,
|
||||
memBwRateUnitPrefix[subCluster.name]
|
||||
)}{memBwRateUnitBase[subCluster.name]} [Max]
|
||||
</td>
|
||||
</tr>
|
||||
</Table>
|
||||
@ -144,85 +427,241 @@
|
||||
<div bind:clientWidth={plotWidths[i]}>
|
||||
{#key $mainQuery.data.nodeMetrics}
|
||||
<Roofline
|
||||
width={plotWidths[i] - 10} height={300} colorDots={true} showTime={false} cluster={subCluster}
|
||||
data={transformPerNodeData($mainQuery.data.nodeMetrics.filter(data => data.subCluster == subCluster.name))} />
|
||||
width={plotWidths[i] - 10}
|
||||
height={300}
|
||||
colorDots={true}
|
||||
showTime={false}
|
||||
cluster={subCluster}
|
||||
data={transformPerNodeData(
|
||||
$mainQuery.data.nodeMetrics.filter(
|
||||
(data) => data.subCluster == subCluster.name
|
||||
)
|
||||
)}
|
||||
/>
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
{/each}
|
||||
|
||||
<hr style="margin-top: -1em;">
|
||||
<hr style="margin-top: -1em;" />
|
||||
|
||||
<!-- Usage Stats as Histograms -->
|
||||
|
||||
<Row cols={4}>
|
||||
<Row>
|
||||
<Col class="p-2">
|
||||
<div bind:clientWidth={colWidth1}>
|
||||
<h4 class="mb-3 text-center">Top Users</h4>
|
||||
{#key $mainQuery.data}
|
||||
<Histogram
|
||||
width={colWidth1 - 25}
|
||||
data={$mainQuery.data.topUsers.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
|
||||
label={(x) => x < $mainQuery.data.topUsers.length ? $mainQuery.data.topUsers[Math.floor(x)].name : '0'}
|
||||
xlabel="User Name" ylabel="Number of Jobs" />
|
||||
<h4 class="text-center">
|
||||
Top Users on {cluster.charAt(0).toUpperCase() +
|
||||
cluster.slice(1)}
|
||||
</h4>
|
||||
{#key $topUserQuery.data}
|
||||
{#if $topUserQuery.fetching}
|
||||
<Spinner />
|
||||
{:else if $topUserQuery.error}
|
||||
<Card body color="danger"
|
||||
>{$topUserQuery.error.message}</Card
|
||||
>
|
||||
{:else}
|
||||
<Pie
|
||||
size={colWidth1}
|
||||
sliceLabel={topUserSelection.label}
|
||||
quantities={$topUserQuery.data.topUser.map(
|
||||
(tu) => tu[topUserSelection.key]
|
||||
)}
|
||||
entities={$topUserQuery.data.topUser.map(
|
||||
(tu) => tu.id
|
||||
)}
|
||||
/>
|
||||
{/if}
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
|
||||
<Col class="px-4 py-2">
|
||||
<Table>
|
||||
<tr class="mb-2"><th>User Name</th><th>Number of Nodes</th></tr>
|
||||
{#each $mainQuery.data.topUsers.sort((a, b) => b.count - a.count) as { name, count }}
|
||||
<tr>
|
||||
<th scope="col"><a href="/monitoring/user/{name}?cluster={cluster}&state=running">{name}</a></th>
|
||||
<td>{count}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</Table>
|
||||
{#key $topUserQuery.data}
|
||||
{#if $topUserQuery.fetching}
|
||||
<Spinner />
|
||||
{:else if $topUserQuery.error}
|
||||
<Card body color="danger"
|
||||
>{$topUserQuery.error.message}</Card
|
||||
>
|
||||
{:else}
|
||||
<Table>
|
||||
<tr class="mb-2">
|
||||
<th>Legend</th>
|
||||
<th>User Name</th>
|
||||
<th
|
||||
>Number of
|
||||
<select
|
||||
class="p-0"
|
||||
bind:value={topUserSelection}
|
||||
>
|
||||
{#each topOptions as option}
|
||||
<option value={option}>
|
||||
{option.label}
|
||||
</option>
|
||||
{/each}
|
||||
</select>
|
||||
</th>
|
||||
</tr>
|
||||
{#each $topUserQuery.data.topUser as tu, i}
|
||||
<tr>
|
||||
<td
|
||||
><Icon
|
||||
name="circle-fill"
|
||||
style="color: {colors[i]};"
|
||||
/></td
|
||||
>
|
||||
<th scope="col"
|
||||
><a
|
||||
href="/monitoring/user/{tu.id}?cluster={cluster}&state=running"
|
||||
>{tu.id}</a
|
||||
></th
|
||||
>
|
||||
<td>{tu[topUserSelection.key]}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</Table>
|
||||
{/if}
|
||||
{/key}
|
||||
</Col>
|
||||
<Col class="p-2">
|
||||
<h4 class="mb-3 text-center">Top Projects</h4>
|
||||
{#key $mainQuery.data}
|
||||
<Histogram
|
||||
width={colWidth1 - 25}
|
||||
data={$mainQuery.data.topProjects.sort((a, b) => b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))}
|
||||
label={(x) => x < $mainQuery.data.topProjects.length ? $mainQuery.data.topProjects[Math.floor(x)].name : '0'}
|
||||
xlabel="Project Code" ylabel="Number of Jobs" />
|
||||
<h4 class="text-center">
|
||||
Top Projects on {cluster.charAt(0).toUpperCase() +
|
||||
cluster.slice(1)}
|
||||
</h4>
|
||||
{#key $topProjectQuery.data}
|
||||
{#if $topProjectQuery.fetching}
|
||||
<Spinner />
|
||||
{:else if $topProjectQuery.error}
|
||||
<Card body color="danger"
|
||||
>{$topProjectQuery.error.message}</Card
|
||||
>
|
||||
{:else}
|
||||
<Pie
|
||||
size={colWidth1}
|
||||
sliceLabel={topProjectSelection.label}
|
||||
quantities={$topProjectQuery.data.topProjects.map(
|
||||
(tp) => tp[topProjectSelection.key]
|
||||
)}
|
||||
entities={$topProjectQuery.data.topProjects.map(
|
||||
(tp) => tp.id
|
||||
)}
|
||||
/>
|
||||
{/if}
|
||||
{/key}
|
||||
</Col>
|
||||
<Col class="px-4 py-2">
|
||||
<Table>
|
||||
<tr class="mb-2"><th>Project Code</th><th>Number of Nodes</th></tr>
|
||||
{#each $mainQuery.data.topProjects.sort((a, b) => b.count - a.count) as { name, count }}
|
||||
<tr>
|
||||
<th scope="col"><a href="/monitoring/jobs/?cluster={cluster}&state=running&project={name}&projectMatch=eq">{name}</a></th>
|
||||
<td>{count}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</Table>
|
||||
{#key $topProjectQuery.data}
|
||||
{#if $topProjectQuery.fetching}
|
||||
<Spinner />
|
||||
{:else if $topProjectQuery.error}
|
||||
<Card body color="danger"
|
||||
>{$topProjectQuery.error.message}</Card
|
||||
>
|
||||
{:else}
|
||||
<Table>
|
||||
<tr class="mb-2">
|
||||
<th>Legend</th>
|
||||
<th>Project Code</th>
|
||||
<th
|
||||
>Number of
|
||||
<select
|
||||
class="p-0"
|
||||
bind:value={topProjectSelection}
|
||||
>
|
||||
{#each topOptions as option}
|
||||
<option value={option}>
|
||||
{option.label}
|
||||
</option>
|
||||
{/each}
|
||||
</select>
|
||||
</th>
|
||||
</tr>
|
||||
{#each $topProjectQuery.data.topProjects as tp, i}
|
||||
<tr>
|
||||
<td
|
||||
><Icon
|
||||
name="circle-fill"
|
||||
style="color: {colors[i]};"
|
||||
/></td
|
||||
>
|
||||
<th scope="col"
|
||||
><a
|
||||
href="/monitoring/jobs/?cluster={cluster}&state=running&project={tp.id}&projectMatch=eq"
|
||||
>{tp.id}</a
|
||||
></th
|
||||
>
|
||||
<td>{tp[topProjectSelection.key]}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</Table>
|
||||
{/if}
|
||||
{/key}
|
||||
</Col>
|
||||
</Row>
|
||||
<Row cols={2} class="mt-3">
|
||||
<hr class="my-2" />
|
||||
<Row>
|
||||
<Col class="p-2">
|
||||
<div bind:clientWidth={colWidth2}>
|
||||
<h4 class="mb-3 text-center">Duration Distribution</h4>
|
||||
{#key $mainQuery.data.stats}
|
||||
<Histogram
|
||||
data={convert2uplot(
|
||||
$mainQuery.data.stats[0].histDuration
|
||||
)}
|
||||
width={colWidth2 - 25}
|
||||
data={$mainQuery.data.stats[0].histDuration}
|
||||
xlabel="Current Runtimes [h]"
|
||||
ylabel="Number of Jobs" />
|
||||
title="Duration Distribution"
|
||||
xlabel="Current Runtimes"
|
||||
xunit="Hours"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"
|
||||
/>
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
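`convert2uplot` comes from `./utils.js` and is not part of this diff; presumably it flattens the `{ count, value }` histogram entries into the column-oriented arrays uPlot expects. A guess at such a helper, clearly an assumption rather than the actual implementation:

```js
// Hypothetical shape of convert2uplot: [{ value, count }, ...] -> [[values], [counts]].
function convert2uplot(histogram) {
  const values = histogram.map((p) => p.value);
  const counts = histogram.map((p) => p.count);
  return [values, counts];
}

console.log(convert2uplot([{ value: 1, count: 5 }, { value: 2, count: 3 }]));
// [[1, 2], [5, 3]]
```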
|
||||
<Col class="p-2">
|
||||
<h4 class="mb-3 text-center">Number of Nodes Distribution</h4>
|
||||
{#key $mainQuery.data.stats}
|
||||
<Histogram
|
||||
data={convert2uplot($mainQuery.data.stats[0].histNumNodes)}
|
||||
width={colWidth2 - 25}
|
||||
data={$mainQuery.data.stats[0].histNumNodes}
|
||||
xlabel="Allocated Nodes [#]"
|
||||
ylabel="Number of Jobs" />
|
||||
title="Number of Nodes Distribution"
|
||||
xlabel="Allocated Nodes"
|
||||
xunit="Nodes"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"
|
||||
/>
|
||||
{/key}
|
||||
</Col>
|
||||
</Row>
|
||||
<Row cols={2}>
|
||||
<Col class="p-2">
|
||||
<div bind:clientWidth={colWidth2}>
|
||||
{#key $mainQuery.data.stats}
|
||||
<Histogram
|
||||
data={convert2uplot(
|
||||
$mainQuery.data.stats[0].histNumCores
|
||||
)}
|
||||
width={colWidth2 - 25}
|
||||
title="Number of Cores Distribution"
|
||||
xlabel="Allocated Cores"
|
||||
xunit="Cores"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"
|
||||
/>
|
||||
{/key}
|
||||
</div>
|
||||
</Col>
|
||||
<Col class="p-2">
|
||||
{#key $mainQuery.data.stats}
|
||||
<Histogram
|
||||
data={convert2uplot($mainQuery.data.stats[0].histNumAccs)}
|
||||
width={colWidth2 - 25}
|
||||
title="Number of Accelerators Distribution"
|
||||
xlabel="Allocated Accs"
|
||||
xunit="Accs"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"
|
||||
/>
|
||||
{/key}
|
||||
</Col>
|
||||
</Row>
|
||||
|
@ -1,5 +1,6 @@
|
||||
<script>
|
||||
import { init, checkMetricDisabled } from './utils.js'
|
||||
import Refresher from './joblist/Refresher.svelte'
|
||||
import { Row, Col, Input, InputGroup, InputGroupText, Icon, Spinner, Card } from 'sveltestrap'
|
||||
import { queryStore, gql, getContextClient } from '@urql/svelte'
|
||||
import TimeSelection from './filters/TimeSelection.svelte'
|
||||
@ -78,6 +79,13 @@
|
||||
{:else if $initq.fetching}
|
||||
<Spinner/>
|
||||
{:else}
|
||||
<Col>
|
||||
<Refresher on:reload={() => {
|
||||
const diff = Date.now() - to
|
||||
from = new Date(from.getTime() + diff)
|
||||
to = new Date(to.getTime() + diff)
|
||||
}} />
|
||||
</Col>
|
||||
<Col>
|
||||
<TimeSelection
|
||||
bind:from={from}
|
||||
@ -136,7 +144,9 @@
|
||||
series={item.data.metric.series}
|
||||
metric={item.data.name}
|
||||
cluster={clusters.find(c => c.name == cluster)}
|
||||
subCluster={item.subCluster} />
|
||||
subCluster={item.subCluster}
|
||||
resources={[{hostname: item.host}]}
|
||||
forNode={true}/>
|
||||
{:else if item.disabled === true && item.data}
|
||||
<Card style="margin-left: 2rem;margin-right: 2rem;" body color="info">Metric disabled for subcluster <code>{selectedMetric}:{item.subCluster}</code></Card>
|
||||
{:else}
|
||||
|
@ -1,6 +1,6 @@
|
||||
<script>
|
||||
import { onMount, getContext } from 'svelte'
|
||||
import { init } from './utils.js'
|
||||
import { init, convert2uplot } from './utils.js'
|
||||
import { Table, Row, Col, Button, Icon, Card, Spinner, Input } from 'sveltestrap'
|
||||
import { queryStore, gql, getContextClient } from '@urql/svelte'
|
||||
import Filters from './filters/Filters.svelte'
|
||||
@ -25,13 +25,6 @@
|
||||
let metrics = ccconfig.plot_list_selectedMetrics, isMetricsSelectionOpen = false
|
||||
let w1, w2, histogramHeight = 250
|
||||
let selectedCluster = filterPresets?.cluster ? filterPresets.cluster : null
|
||||
let resize = false
|
||||
/* Resize Context
|
||||
* A) Each viewport change triggers histogram rerender due to variable dimensions clearing canvas if not rerendered
|
||||
* B) Opening filters (and some other things) triggers small change in viewport dimensions (Fix here?)
|
||||
* A+B) Histogram rerenders if filters opened, high performance impact if dataload heavy
|
||||
* Solution: Default to fixed histogram dimensions, allow user to enable automatic resizing
|
||||
*/
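A debounced variant of option A is what the reworked Histogram component further below ends up doing internally (its `sizeChanged` handler waits 200 ms before re-rendering). A minimal sketch of that pattern, independent of Svelte; the `plot.setSize` call in the usage comment is uPlot's resize API:

```js
// Minimal debounce around a re-render, similar in spirit to sizeChanged() in Histogram.svelte.
let timeoutId = null;
function onResize(render, delay = 200) {
  if (timeoutId != null) clearTimeout(timeoutId);
  timeoutId = setTimeout(() => {
    timeoutId = null;
    render();
  }, delay);
}

// Usage: call onResize(() => plot.setSize({ width, height })) from a resize observer.
```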
|
||||
|
||||
const client = getContextClient();
|
||||
$: stats = queryStore({
|
||||
@ -137,47 +130,31 @@
|
||||
<th scope="row">Total Core Hours</th>
|
||||
<td>{$stats.data.jobsStatistics[0].totalCoreHours}</td>
|
||||
</tr>
|
||||
<!-- <tr>
|
||||
<th scope="row">Toggle Histogram Resizing</th>
|
||||
<td><Input id="c3" value={resize} type="switch" on:change={() => (resize = !resize)}/></td>
|
||||
</tr> -->
|
||||
</tbody>
|
||||
</Table>
|
||||
</Col>
|
||||
<div class="col-4" style="text-align: center;" bind:clientWidth={w1}>
|
||||
<b>Duration Distribution</b>
|
||||
<div class="col-4 text-center" bind:clientWidth={w1}>
|
||||
{#key $stats.data.jobsStatistics[0].histDuration}
|
||||
{#if resize == true}
|
||||
<Histogram
|
||||
data={$stats.data.jobsStatistics[0].histDuration}
|
||||
data={convert2uplot($stats.data.jobsStatistics[0].histDuration)}
|
||||
width={w1 - 25} height={histogramHeight}
|
||||
xlabel="Current Runtimes [h]"
|
||||
ylabel="Number of Jobs"/>
|
||||
{:else}
|
||||
<Histogram
|
||||
data={$stats.data.jobsStatistics[0].histDuration}
|
||||
width={400} height={250}
|
||||
xlabel="Current Runtimes [h]"
|
||||
ylabel="Number of Jobs"/>
|
||||
{/if}
|
||||
title="Duration Distribution"
|
||||
xlabel="Current Runtimes"
|
||||
xunit="Hours"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"/>
|
||||
{/key}
|
||||
</div>
|
||||
<div class="col-4" style="text-align: center;" bind:clientWidth={w2}>
|
||||
<b>Number of Nodes Distribution</b>
|
||||
<div class="col-4 text-center" bind:clientWidth={w2}>
|
||||
{#key $stats.data.jobsStatistics[0].histNumNodes}
|
||||
{#if resize == true}
|
||||
<Histogram
|
||||
data={$stats.data.jobsStatistics[0].histNumNodes}
|
||||
data={convert2uplot($stats.data.jobsStatistics[0].histNumNodes)}
|
||||
width={w2 - 25} height={histogramHeight}
|
||||
xlabel="Allocated Nodes [#]"
|
||||
ylabel="Number of Jobs" />
|
||||
{:else}
|
||||
<Histogram
|
||||
data={$stats.data.jobsStatistics[0].histNumNodes}
|
||||
width={400} height={250}
|
||||
xlabel="Allocated Nodes [#]"
|
||||
ylabel="Number of Jobs" />
|
||||
{/if}
|
||||
title="Number of Nodes Distribution"
|
||||
xlabel="Allocated Nodes"
|
||||
xunit="Nodes"
|
||||
ylabel="Number of Jobs"
|
||||
yunit="Jobs"/>
|
||||
{/key}
|
||||
</div>
|
||||
{/if}
|
||||
|
@ -7,6 +7,7 @@
|
||||
export let customEnabled = true
|
||||
export let anyEnabled = false
|
||||
export let options = {
|
||||
'Last quarter hour': 15*60,
|
||||
'Last half hour': 30*60,
|
||||
'Last hour': 60*60,
|
||||
'Last 2hrs': 2*60*60,
|
||||
|
@ -136,6 +136,7 @@
|
||||
{cluster}
|
||||
subCluster={job.subCluster}
|
||||
isShared={(job.exclusive != 1)}
|
||||
resources={job.resources}
|
||||
/>
|
||||
{:else if metric.disabled == true && metric.data}
|
||||
<Card body color="info">Metric disabled for subcluster <code>{metric.data.name}:{job.subCluster}</code></Card>
|
||||
|
@ -1,229 +1,216 @@
|
||||
<!--
|
||||
@component
|
||||
Properties:
|
||||
- width, height: Number
|
||||
- min, max: Number
|
||||
- label: (x-Value) => String
|
||||
- data: [{ value: Number, count: Number }]
|
||||
- Todo
|
||||
-->
|
||||
|
||||
<div
|
||||
on:mousemove={mousemove}
|
||||
on:mouseleave={() => (infoText = '')}>
|
||||
<span style="left: {paddingLeft + 5}px;">{infoText}</span>
|
||||
<canvas bind:this={canvasElement} width="{width}" height="{height}"></canvas>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
import { onMount } from 'svelte'
|
||||
import uPlot from 'uplot'
|
||||
import { formatNumber } from '../units.js'
|
||||
import { onMount, onDestroy } from 'svelte'
|
||||
import { Card } from 'sveltestrap'
|
||||
|
||||
export let data
|
||||
export let width = 500
|
||||
export let height = 300
|
||||
export let title = ''
|
||||
export let xlabel = ''
|
||||
export let xunit = 'X'
|
||||
export let ylabel = ''
|
||||
export let min = null
|
||||
export let max = null
|
||||
export let small = false
|
||||
export let label = formatNumber
|
||||
export let yunit = 'Y'
|
||||
|
||||
const fontSize = 12
|
||||
const fontFamily = 'system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"'
|
||||
const paddingLeft = 50, paddingRight = 20, paddingTop = 20, paddingBottom = 20
|
||||
const { bars } = uPlot.paths
|
||||
|
||||
let ctx, canvasElement
|
||||
const drawStyles = {
|
||||
bars: 1,
|
||||
points: 2,
|
||||
};
|
||||
|
||||
const maxCount = data.reduce((max, point) => Math.max(max, point.count), 0),
|
||||
maxValue = data.reduce((max, point) => Math.max(max, point.value), 0.1)
|
||||
function paths(u, seriesIdx, idx0, idx1, extendGap, buildClip) {
|
||||
let s = u.series[seriesIdx];
|
||||
let style = s.drawStyle;
|
||||
|
||||
function getStepSize(valueRange, pixelRange, minSpace) {
|
||||
const proposition = valueRange / (pixelRange / minSpace)
|
||||
const getStepSize = n => Math.pow(10, Math.floor(n / 3)) *
|
||||
(n < 0 ? [1., 5., 2.][-n % 3] : [1., 2., 5.][n % 3])
|
||||
let renderer = ( // If bars to wide, change here
|
||||
style == drawStyles.bars ? (
|
||||
bars({size: [0.75, 100]})
|
||||
) :
|
||||
() => null
|
||||
)
|
||||
|
||||
let n = 0
|
||||
let stepsize = getStepSize(n)
|
||||
while (true) {
|
||||
let bigger = getStepSize(n + 1)
|
||||
if (proposition > bigger) {
|
||||
n += 1
|
||||
stepsize = bigger
|
||||
} else {
|
||||
return stepsize
|
||||
return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip);
|
||||
}
|
||||
|
||||
// converts the legend into a simple tooltip
|
||||
function legendAsTooltipPlugin({ className, style = { backgroundColor:"rgba(255, 249, 196, 0.92)", color: "black" } } = {}) {
|
||||
let legendEl;
|
||||
|
||||
function init(u, opts) {
|
||||
legendEl = u.root.querySelector(".u-legend");
|
||||
|
||||
legendEl.classList.remove("u-inline");
|
||||
className && legendEl.classList.add(className);
|
||||
|
||||
uPlot.assign(legendEl.style, {
|
||||
textAlign: "left",
|
||||
pointerEvents: "none",
|
||||
display: "none",
|
||||
position: "absolute",
|
||||
left: 0,
|
||||
top: 0,
|
||||
zIndex: 100,
|
||||
boxShadow: "2px 2px 10px rgba(0,0,0,0.5)",
|
||||
...style
|
||||
});
|
||||
|
||||
// hide series color markers
|
||||
const idents = legendEl.querySelectorAll(".u-marker");
|
||||
|
||||
for (let i = 0; i < idents.length; i++)
|
||||
idents[i].style.display = "none";
|
||||
|
||||
const overEl = u.over;
|
||||
overEl.style.overflow = "visible";
|
||||
|
||||
// move legend into plot bounds
|
||||
overEl.appendChild(legendEl);
|
||||
|
||||
// show/hide tooltip on enter/exit
|
||||
overEl.addEventListener("mouseenter", () => {legendEl.style.display = null;});
|
||||
overEl.addEventListener("mouseleave", () => {legendEl.style.display = "none";});
|
||||
|
||||
// let tooltip exit plot
|
||||
// overEl.style.overflow = "visible";
|
||||
}
|
||||
|
||||
function update(u) {
|
||||
const { left, top } = u.cursor;
|
||||
legendEl.style.transform = "translate(" + (left + 15) + "px, " + (top + 15) + "px)";
|
||||
}
|
||||
|
||||
return {
|
||||
hooks: {
|
||||
init: init,
|
||||
setCursor: update,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let infoText = ''
|
||||
function mousemove(event) {
|
||||
let rect = event.target.getBoundingClientRect()
|
||||
let x = event.clientX - rect.left
|
||||
if (x < paddingLeft || x > width - paddingRight) {
|
||||
infoText = ''
|
||||
return
|
||||
}
|
||||
|
||||
const w = width - paddingLeft - paddingRight
|
||||
const barWidth = Math.round(w / (maxValue + 1))
|
||||
x = Math.floor((x - paddingLeft) / (w - barWidth) * maxValue)
|
||||
let point = data.find(point => point.value == x)
|
||||
|
||||
if (point)
|
||||
infoText = `count: ${point.count} (value: ${label(x)})`
|
||||
else
|
||||
infoText = ''
|
||||
};
|
||||
}
|
||||
|
||||
let plotWrapper = null
|
||||
let uplot = null
|
||||
let timeoutId = null
|
||||
|
||||
function render() {
|
||||
const labelOffset = Math.floor(height * 0.1)
|
||||
const h = height - paddingTop - paddingBottom - labelOffset
|
||||
const w = width - paddingLeft - paddingRight
|
||||
const barGap = 5
|
||||
const barWidth = Math.ceil(w / (maxValue + 1)) - barGap
|
||||
let opts = {
|
||||
width: width,
|
||||
height: height,
|
||||
title: title,
|
||||
plugins: [
|
||||
legendAsTooltipPlugin()
|
||||
],
|
||||
cursor: {
|
||||
points: {
|
||||
size: (u, seriesIdx) => u.series[seriesIdx].points.size * 2.5,
|
||||
width: (u, seriesIdx, size) => size / 4,
|
||||
stroke: (u, seriesIdx) => u.series[seriesIdx].points.stroke(u, seriesIdx) + '90',
|
||||
fill: (u, seriesIdx) => "#fff",
|
||||
}
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
time: false
|
||||
},
|
||||
},
|
||||
axes: [
|
||||
{
|
||||
stroke: "#000000",
|
||||
// scale: 'x',
|
||||
label: xlabel,
|
||||
labelGap: 10,
|
||||
size: 25,
|
||||
incrs: [1, 2, 5, 6, 10, 12, 50, 100, 500, 1000, 5000, 10000],
|
||||
border: {
|
||||
show: true,
|
||||
stroke: "#000000",
|
||||
},
|
||||
ticks: {
|
||||
width: 1 / devicePixelRatio,
|
||||
size: 5 / devicePixelRatio,
|
||||
stroke: "#000000",
|
||||
},
|
||||
values: (_, t) => t.map(v => formatNumber(v)),
|
||||
},
|
||||
{
|
||||
stroke: "#000000",
|
||||
// scale: 'y',
|
||||
label: ylabel,
|
||||
labelGap: 10,
|
||||
size: 35,
|
||||
border: {
|
||||
show: true,
|
||||
stroke: "#000000",
|
||||
},
|
||||
ticks: {
|
||||
width: 1 / devicePixelRatio,
|
||||
size: 5 / devicePixelRatio,
|
||||
stroke: "#000000",
|
||||
},
|
||||
values: (_, t) => t.map(v => formatNumber(v)),
|
||||
},
|
||||
],
|
||||
series: [
|
||||
{
|
||||
label: xunit !== '' ? xunit : null,
|
||||
},
|
||||
Object.assign({
|
||||
label: yunit !== '' ? yunit : null,
|
||||
width: 1 / devicePixelRatio,
|
||||
drawStyle: drawStyles.points,
|
||||
lineInterpolation: null,
|
||||
paths,
|
||||
}, {
|
||||
drawStyle: drawStyles.bars,
|
||||
lineInterpolation: null,
|
||||
stroke: "#85abce",
|
||||
fill: "#85abce", // + "1A", // Transparent Fill
|
||||
}),
|
||||
]
|
||||
};
|
||||
|
||||
if (Number.isNaN(barWidth))
|
||||
return
|
||||
uplot = new uPlot(opts, data, plotWrapper)
|
||||
}
|
||||
|
||||
const getCanvasX = (value) => (value / maxValue) * (w - barWidth) + paddingLeft + (barWidth / 2.)
|
||||
const getCanvasY = (count) => (h - (count / maxCount) * h) + paddingTop
|
||||
|
||||
// X Axis
|
||||
ctx.font = `bold ${fontSize}px ${fontFamily}`
|
||||
ctx.fillStyle = 'black'
|
||||
if (xlabel != '') {
|
||||
let textWidth = ctx.measureText(xlabel).width
|
||||
ctx.fillText(xlabel, Math.floor((width / 2) - (textWidth / 2) + barGap), height - Math.floor(labelOffset / 2))
|
||||
}
|
||||
ctx.textAlign = 'center'
|
||||
ctx.font = `${fontSize}px ${fontFamily}`
|
||||
if (min != null && max != null) {
|
||||
const stepsizeX = getStepSize(max - min, w, 75)
|
||||
let startX = 0
|
||||
while (startX < min)
|
||||
startX += stepsizeX
|
||||
|
||||
for (let x = startX; x < max; x += stepsizeX) {
|
||||
let px = ((x - min) / (max - min)) * (w - barWidth) + paddingLeft + (barWidth / 2.)
|
||||
ctx.fillText(`${formatNumber(x)}`, px, height - paddingBottom - Math.floor(labelOffset / 2))
|
||||
}
|
||||
} else {
|
||||
const stepsizeX = getStepSize(maxValue, w, 120)
|
||||
for (let x = 0; x <= maxValue; x += stepsizeX) {
|
||||
ctx.fillText(label(x), getCanvasX(x), height - paddingBottom - Math.floor(labelOffset / (small ? 8 : 2)))
|
||||
}
|
||||
}
|
||||
|
||||
// Y Axis
|
||||
ctx.fillStyle = 'black'
|
||||
ctx.strokeStyle = '#bbbbbb'
|
||||
ctx.font = `bold ${fontSize}px ${fontFamily}`
|
||||
if (ylabel != '') {
|
||||
ctx.save()
|
||||
ctx.translate(15, Math.floor(h / 2))
|
||||
ctx.rotate(-Math.PI / 2)
|
||||
ctx.fillText(ylabel, 0, 0)
|
||||
ctx.restore()
|
||||
}
|
||||
ctx.textAlign = 'right'
|
||||
ctx.font = `${fontSize}px ${fontFamily}`
|
||||
ctx.beginPath()
|
||||
const stepsizeY = getStepSize(maxCount, h, 50)
|
||||
for (let y = stepsizeY; y <= maxCount; y += stepsizeY) {
|
||||
const py = Math.floor(getCanvasY(y))
|
||||
ctx.fillText(`${formatNumber(y)}`, paddingLeft - 5, py)
|
||||
ctx.moveTo(paddingLeft, py)
|
||||
ctx.lineTo(width, py)
|
||||
}
|
||||
ctx.stroke()
|
||||
|
||||
// Draw bars
|
||||
ctx.fillStyle = '#85abce'
|
||||
for (let p of data) {
|
||||
ctx.fillRect(
|
||||
getCanvasX(p.value) - (barWidth / 2.),
|
||||
getCanvasY(p.count),
|
||||
barWidth,
|
||||
(p.count / maxCount) * h)
|
||||
}
|
||||
|
||||
// Fat lines left and below plotting area
|
||||
ctx.strokeStyle = 'black'
|
||||
ctx.beginPath()
|
||||
ctx.moveTo(0, height - paddingBottom - labelOffset)
|
||||
ctx.lineTo(width, height - paddingBottom - labelOffset)
|
||||
ctx.moveTo(paddingLeft, 0)
|
||||
ctx.lineTo(paddingLeft, height - Math.floor(labelOffset / 2))
|
||||
ctx.stroke()
|
||||
}
|
||||
|
||||
let mounted = false
|
||||
onMount(() => {
|
||||
mounted = true
|
||||
canvasElement.width = width
|
||||
canvasElement.height = height
|
||||
ctx = canvasElement.getContext('2d')
|
||||
render()
|
||||
})
|
||||
|
||||
let timeoutId = null;
|
||||
onDestroy(() => {
|
||||
if (uplot)
|
||||
uplot.destroy()
|
||||
|
||||
if (timeoutId != null)
|
||||
clearTimeout(timeoutId)
|
||||
})
|
||||
|
||||
function sizeChanged() {
|
||||
if (timeoutId != null)
|
||||
clearTimeout(timeoutId)
|
||||
|
||||
timeoutId = setTimeout(() => {
|
||||
timeoutId = null
|
||||
if (!canvasElement)
|
||||
return
|
||||
if (uplot)
|
||||
uplot.destroy()
|
||||
|
||||
canvasElement.width = width
|
||||
canvasElement.height = height
|
||||
ctx = canvasElement.getContext('2d')
|
||||
render()
|
||||
}, 250)
|
||||
}, 200)
|
||||
}
|
||||
|
||||
$: sizeChanged(width, height)
|
||||
</script>
|
||||
|
||||
<style>
|
||||
div {
|
||||
position: relative;
|
||||
}
|
||||
div > span {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
}
|
||||
</style>
|
||||
{#if data.length > 0}
|
||||
<div bind:this={plotWrapper}/>
|
||||
{:else}
|
||||
<Card class="mx-4" body color="warning">Cannot render histogram: No data!</Card>
|
||||
{/if}
|
||||
|
||||
<script context="module">
|
||||
import { formatNumber } from '../units.js'
|
||||
|
||||
export function binsFromFootprint(weights, values, numBins) {
|
||||
let min = 0, max = 0
|
||||
if (values.length != 0) {
|
||||
for (let x of values) {
|
||||
min = Math.min(min, x)
|
||||
max = Math.max(max, x)
|
||||
}
|
||||
max += 1 // So that we have an exclusive range.
|
||||
}
|
||||
|
||||
if (numBins == null || numBins < 3)
|
||||
numBins = 3
|
||||
|
||||
const bins = new Array(numBins).fill(0)
|
||||
for (let i = 0; i < values.length; i++)
|
||||
bins[Math.floor(((values[i] - min) / (max - min)) * numBins)] += weights ? weights[i] : 1
|
||||
|
||||
return {
|
||||
label: idx => {
|
||||
let start = min + (idx / numBins) * (max - min)
|
||||
let stop = min + ((idx + 1) / numBins) * (max - min)
|
||||
return `${formatNumber(start)} - ${formatNumber(stop)}`
|
||||
},
|
||||
bins: bins.map((count, idx) => ({ value: idx, count: count })),
|
||||
min: min,
|
||||
max: max
|
||||
}
|
||||
}
|
||||
</script>
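`binsFromFootprint` returns `{ label, bins, min, max }`, with `bins` already in the `{ value, count }` shape the Histogram consumes (via `convert2uplot`). A short usage sketch with made-up footprint values, assuming the export above has been imported:

```js
// Made-up footprint values; weights could be node hours per job, for example.
const values  = [0.2, 0.5, 0.9, 1.4, 2.1, 2.3];
const weights = [1, 1, 2, 1, 3, 1];

const { bins, label, min, max } = binsFromFootprint(weights, values, 3);
console.log(bins);             // [{ value: 0, count: 4 }, ...] – ready for convert2uplot()
console.log(label(0), min, max);
```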
|
||||
|
@ -26,17 +26,19 @@
|
||||
import { getContext, onMount, onDestroy } from 'svelte'
|
||||
import { Card } from 'sveltestrap'
|
||||
|
||||
export let metric
|
||||
export let scope = 'node'
|
||||
export let resources = []
|
||||
export let width
|
||||
export let height
|
||||
export let timestep
|
||||
export let series
|
||||
export let useStatsSeries = null
|
||||
export let statisticsSeries = null
|
||||
export let cluster
|
||||
export let subCluster
|
||||
export let metric
|
||||
export let useStatsSeries = null
|
||||
export let scope = 'node'
|
||||
export let isShared = false
|
||||
export let forNode = false
|
||||
|
||||
if (useStatsSeries == null)
|
||||
useStatsSeries = statisticsSeries != null
|
@ -53,6 +55,70 @@
const backgroundColors = { normal: 'rgba(255, 255, 255, 1.0)', caution: 'rgba(255, 128, 0, 0.3)', alert: 'rgba(255, 0, 0, 0.3)' }
const thresholds = findThresholds(metricConfig, scope, typeof subCluster == 'string' ? cluster.subClusters.find(sc => sc.name == subCluster) : subCluster)

// converts the legend into a simple tooltip
function legendAsTooltipPlugin({ className, style = { backgroundColor:"rgba(255, 249, 196, 0.92)", color: "black" } } = {}) {
    let legendEl;
    const dataSize = series.length

    function init(u, opts) {
        legendEl = u.root.querySelector(".u-legend");

        legendEl.classList.remove("u-inline");
        className && legendEl.classList.add(className);

        uPlot.assign(legendEl.style, {
            textAlign: "left",
            pointerEvents: "none",
            display: "none",
            position: "absolute",
            left: 0,
            top: 0,
            zIndex: 100,
            boxShadow: "2px 2px 10px rgba(0,0,0,0.5)",
            ...style
        });

        // conditional hide series color markers:
        if (useStatsSeries === true || // Min/Max/Avg Self-Explanatory
            dataSize === 1 ||          // Only one Y-Dataseries
            dataSize > 6 ){            // More than 6 Y-Dataseries
            const idents = legendEl.querySelectorAll(".u-marker");
            for (let i = 0; i < idents.length; i++)
                idents[i].style.display = "none";
        }

        const overEl = u.over;
        overEl.style.overflow = "visible";

        // move legend into plot bounds
        overEl.appendChild(legendEl);

        // show/hide tooltip on enter/exit
        overEl.addEventListener("mouseenter", () => {legendEl.style.display = null;});
        overEl.addEventListener("mouseleave", () => {legendEl.style.display = "none";});

        // let tooltip exit plot
        // overEl.style.overflow = "visible";
    }

    function update(u) {
        const { left, top } = u.cursor;
        const width = u.over.querySelector(".u-legend").offsetWidth;
        legendEl.style.transform = "translate(" + (left - width - 15) + "px, " + (top + 15) + "px)";
    }

    if (dataSize <= 12 || useStatsSeries === true) {
        return {
            hooks: {
                init: init,
                setCursor: update,
            }
        }
    } else { // Setting legend-opts show/live as object with false here will not work ...
        return {}
    }
}

function backgroundColor() {
    if (clusterCockpitConfig.plot_general_colorBackground == false
        || !thresholds
@ -88,24 +154,48 @@
        ? statisticsSeries.mean.length
        : series.reduce((n, series) => Math.max(n, series.data.length), 0)
    const maxX = longestSeries * timestep
    const maxY = thresholds != null
        ? useStatsSeries
    let maxY = null

    if (thresholds !== null) {
        maxY = useStatsSeries
            ? (statisticsSeries.max.reduce((max, x) => Math.max(max, x), thresholds.normal) || thresholds.normal)
            : (series.reduce((max, series) => Math.max(max, series.statistics?.max), thresholds.normal) || thresholds.normal)
        : null
    const plotSeries = [{}]

        if (maxY >= (10 * thresholds.normal)) { // Hard y-range render limit if outliers in series data
            maxY = (10 * thresholds.normal)
        }
    }

    const plotSeries = [{label: 'Runtime', value: (u, ts, sidx, didx) => didx == null ? null : formatTime(ts, forNode)}]
    const plotData = [new Array(longestSeries)]
    for (let i = 0; i < longestSeries; i++) // TODO: Cache/Reuse this array?
        plotData[0][i] = i * timestep

    if (forNode === true) {
        // Negative Timestamp Buildup
        for (let i = 0; i <= longestSeries; i++) {
            plotData[0][i] = (longestSeries - i) * timestep * -1
        }
    } else {
        // Positive Timestamp Buildup
        for (let j = 0; j < longestSeries; j++) // TODO: Cache/Reuse this array?
            plotData[0][j] = j * timestep
    }

    let plotBands = undefined
    if (useStatsSeries) {
        plotData.push(statisticsSeries.min)
        plotData.push(statisticsSeries.max)
        plotData.push(statisticsSeries.mean)
        plotSeries.push({ scale: 'y', width: lineWidth, stroke: 'red' })
        plotSeries.push({ scale: 'y', width: lineWidth, stroke: 'green' })
        plotSeries.push({ scale: 'y', width: lineWidth, stroke: 'black' })

        if (forNode === true) { // timestamp 0 with null value for reversed time axis
            if (plotData[1].length != 0) plotData[1].push(null)
            if (plotData[2].length != 0) plotData[2].push(null)
            if (plotData[3].length != 0) plotData[3].push(null)
        }

        plotSeries.push({ label: 'min', scale: 'y', width: lineWidth, stroke: 'red' })
        plotSeries.push({ label: 'max', scale: 'y', width: lineWidth, stroke: 'green' })
        plotSeries.push({ label: 'mean', scale: 'y', width: lineWidth, stroke: 'black' })

        plotBands = [
            { series: [2,3], fill: 'rgba(0,255,0,0.1)' },
            { series: [3,1], fill: 'rgba(255,0,0,0.1)' }
@ -113,7 +203,11 @@
    } else {
        for (let i = 0; i < series.length; i++) {
            plotData.push(series[i].data)
            if (forNode === true && plotData[1].length != 0) plotData[1].push(null) // timestamp 0 with null value for reversed time axis
            plotSeries.push({
                label: scope === 'node' ? resources[i].hostname :
                    // scope === 'accelerator' ? resources[0].accelerators[i] :
                    scope + ' #' + (i+1),
                scale: 'y',
                width: lineWidth,
                stroke: lineColor(i, series.length)
@ -124,13 +218,16 @@
    const opts = {
        width,
        height,
        plugins: [
            legendAsTooltipPlugin()
        ],
        series: plotSeries,
        axes: [
            {
                scale: 'x',
                space: 35,
                incrs: timeIncrs(timestep, maxX),
                values: (_, vals) => vals.map(v => formatTime(v))
                incrs: timeIncrs(timestep, maxX, forNode),
                values: (_, vals) => vals.map(v => formatTime(v, forNode))
            },
            {
                scale: 'y',
@ -177,8 +274,11 @@
            x: { time: false },
            y: maxY ? { range: [0., maxY * 1.1] } : {}
        },
        cursor: { show: false },
        legend: { show: false, live: false }
        legend : { // Display legend until max 12 Y-dataseries
            show: (series.length <= 12 || useStatsSeries === true) ? true : false,
            live: (series.length <= 12 || useStatsSeries === true) ? true : false
        },
        cursor: { drag: { x: true, y: true } }
    }

    // console.log(opts)
@ -249,24 +349,35 @@
    }
</script>
<script context="module">

    export function formatTime(t) {
        let h = Math.floor(t / 3600)
        let m = Math.floor((t % 3600) / 60)
        if (h == 0)
            return `${m}m`
        else if (m == 0)
            return `${h}h`
        else
            return `${h}:${m}h`
    export function formatTime(t, forNode = false) {
        if (t !== null) {
            if (isNaN(t)) {
                return t
            } else {
                const tAbs = Math.abs(t)
                const h = Math.floor(tAbs / 3600)
                const m = Math.floor((tAbs % 3600) / 60)
                // Re-Add "negativity" to time ticks only as string, so that if-cases work as intended
                if (h == 0)
                    return `${forNode && m != 0 ? '-' : ''}${m}m`
                else if (m == 0)
                    return `${forNode?'-':''}${h}h`
                else
                    return `${forNode?'-':''}${h}:${m}h`
            }
        }
    }

    export function timeIncrs(timestep, maxX) {
        let incrs = []
        for (let t = timestep; t < maxX; t *= 10)
            incrs.push(t, t * 2, t * 3, t * 5)
    export function timeIncrs(timestep, maxX, forNode) {
        if (forNode === true) {
            return [60, 300, 900, 1800, 3600, 7200, 14400, 21600] // forNode fixed increments
        } else {
            let incrs = []
            for (let t = timestep; t < maxX; t *= 10)
                incrs.push(t, t * 2, t * 3, t * 5)

            return incrs
            return incrs
        }
    }

    export function findThresholds(metricConfig, scope, subCluster) {
@ -323,8 +434,9 @@
{#if series[0].data.length > 0}
    <div bind:this={plotWrapper} class="cc-plot"></div>
{:else}
    <Card style="margin-left: 2rem;margin-right: 2rem;" body color="warning">Cannot render plot: No series data returned for <code>{metric}</code></Card>
    <Card class="mx-4" body color="warning">Cannot render plot: No series data returned for <code>{metric}</code></Card>
{/if}

<style>
    .cc-plot {
        border-radius: 5px;

81 web/frontend/src/plots/Pie.svelte Normal file
@ -0,0 +1,81 @@
<script context="module">
    // http://tsitsul.in/blog/coloropt/ : 12 colors normal
    export const colors = [
        'rgb(235,172,35)',
        'rgb(184,0,88)',
        'rgb(0,140,249)',
        'rgb(0,110,0)',
        'rgb(0,187,173)',
        'rgb(209,99,230)',
        'rgb(178,69,2)',
        'rgb(255,146,135)',
        'rgb(89,84,214)',
        'rgb(0,198,248)',
        'rgb(135,133,0)',
        'rgb(0,167,108)',
        'rgb(189,189,189)'
    ]
</script>
<script>
    import { Pie } from 'svelte-chartjs';
    import {
        Chart as ChartJS,
        Title,
        Tooltip,
        Legend,
        Filler,
        ArcElement,
        CategoryScale
    } from 'chart.js';

    ChartJS.register(
        Title,
        Tooltip,
        Legend,
        Filler,
        ArcElement,
        CategoryScale
    );

    export let size
    export let sliceLabel
    export let quantities
    export let entities
    export let displayLegend = false

    $: data = {
        labels: entities,
        datasets: [
            {
                label: sliceLabel,
                data: quantities,
                fill: 1,
                backgroundColor: colors.slice(0, quantities.length)
            }
        ]
    }

    const options = {
        maintainAspectRatio: false,
        animation: false,
        plugins: {
            legend: {
                display: displayLegend
            }
        }
    }

</script>

<div class="chart-container" style="--container-width: {size}; --container-height: {size}">
    <Pie {data} {options}/>
</div>

<style>
    .chart-container {
        position: relative;
        margin: auto;
        height: var(--container-height);
        width: var(--container-width);
    }
</style>
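For reference, the reactive data object built in the new Pie component is the standard chart.js pie shape: one label per slice and a single dataset whose backgroundColor is cut from the exported palette. The following is a minimal plain-JavaScript sketch with hypothetical entities, quantities, and slice label; it is illustrative only and not part of this commit.

import { colors } from './Pie.svelte'      // palette exported from the module context above

// Hypothetical example props
const entities   = ['alice', 'bob', 'carol']   // one label per slice
const quantities = [1200, 800, 400]            // one value per slice
const sliceLabel = 'Core Hours'

// Shape handed to <Pie {data} {options}/>: one dataset, one color per slice
const data = {
    labels: entities,
    datasets: [
        {
            label: sliceLabel,
            data: quantities,
            backgroundColor: colors.slice(0, quantities.length)
        }
    ]
}

const options = {
    maintainAspectRatio: false,
    animation: false,
    plugins: { legend: { display: false } }   // mirrors the displayLegend prop default
}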
@ -1,22 +1,34 @@
<div>
    <canvas bind:this={canvasElement} width="{width}" height="{height}"></canvas>
</div>

<script>
    import { onMount, getContext } from 'svelte'
    import { getContext } from 'svelte'
    import { Radar } from 'svelte-chartjs';
    import {
        Chart as ChartJS,
        Title,
        Tooltip,
        Legend,
        Filler,
        PointElement,
        RadialLinearScale,
        LineElement
    } from 'chart.js';

    ChartJS.register(
        Title,
        Tooltip,
        Legend,
        Filler,
        PointElement,
        RadialLinearScale,
        LineElement
    );

    export let size
    export let metrics
    export let width
    export let height
    export let cluster
    export let jobMetrics

    const fontSize = 12
    const fontFamily = 'system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"'
    const metricConfig = getContext('metrics')

    let ctx, canvasElement

    const labels = metrics.filter(name => {
        if (!jobMetrics.find(m => m.name == name && m.scope == "node")) {
            console.warn(`PolarPlot: No metric data for '${name}'`)
@ -46,145 +58,49 @@
        return avg / metric.series.length
    }

    const data = [
        {
            name: 'Max',
            values: getValuesForStat(getMax),
            color: 'rgb(0, 102, 255)',
            areaColor: 'rgba(0, 102, 255, 0.25)'
        },
        {
            name: 'Avg',
            values: getValuesForStat(getAvg),
            color: 'rgb(255, 153, 0)',
            areaColor: 'rgba(255, 153, 0, 0.25)'
        }
    ]

    function render() {
        if (!width || Number.isNaN(width))
            return

        const centerX = width / 2
        const centerY = height / 2 - 15
        const radius = (Math.min(width, height) / 2) - 50

        // Draw circles
        ctx.lineWidth = 1
        ctx.strokeStyle = '#999999'
        ctx.beginPath()
        ctx.arc(centerX, centerY, radius * 1.0, 0, Math.PI * 2, false)
        ctx.stroke()
        ctx.beginPath()
        ctx.arc(centerX, centerY, radius * 0.666, 0, Math.PI * 2, false)
        ctx.stroke()
        ctx.beginPath()
        ctx.arc(centerX, centerY, radius * 0.333, 0, Math.PI * 2, false)
        ctx.stroke()

        // Axis
        ctx.font = `${fontSize}px ${fontFamily}`
        ctx.textAlign = 'center'
        ctx.fillText('1/3',
            Math.floor(centerX + radius * 0.333),
            Math.floor(centerY + 15))
        ctx.fillText('2/3',
            Math.floor(centerX + radius * 0.666),
            Math.floor(centerY + 15))
        ctx.fillText('1.0',
            Math.floor(centerX + radius * 1.0),
            Math.floor(centerY + 15))

        // Label text and straight lines from center
        for (let i = 0; i < labels.length; i++) {
            const angle = 2 * Math.PI * ((i + 1) / labels.length)
            const dx = Math.cos(angle) * radius
            const dy = Math.sin(angle) * radius
            ctx.fillText(labels[i],
                Math.floor(centerX + dx * 1.1),
                Math.floor(centerY + dy * 1.1))

            ctx.beginPath()
            ctx.moveTo(centerX, centerY)
            ctx.lineTo(centerX + dx, centerY + dy)
            ctx.stroke()
        }

        for (let dataset of data) {
            console.assert(dataset.values.length === labels.length, 'this will look confusing')
            ctx.fillStyle = dataset.color
            ctx.strokeStyle = dataset.color
            const points = []
            for (let i = 0; i < dataset.values.length; i++) {
                const value = dataset.values[i]
                const angle = 2 * Math.PI * ((i + 1) / labels.length)
                const x = centerX + Math.cos(angle) * radius * value
                const y = centerY + Math.sin(angle) * radius * value

                ctx.beginPath()
                ctx.arc(x, y, 3, 0, Math.PI * 2, false)
                ctx.fill()

                points.push({ x, y })
    const data = {
        labels: labels,
        datasets: [
            {
                label: 'Max',
                data: getValuesForStat(getMax),
                fill: 1,
                backgroundColor: 'rgba(0, 102, 255, 0.25)',
                borderColor: 'rgb(0, 102, 255)',
                pointBackgroundColor: 'rgb(0, 102, 255)',
                pointBorderColor: '#fff',
                pointHoverBackgroundColor: '#fff',
                pointHoverBorderColor: 'rgb(0, 102, 255)'
            },
            {
                label: 'Avg',
                data: getValuesForStat(getAvg),
                fill: true,
                backgroundColor: 'rgba(255, 153, 0, 0.25)',
                borderColor: 'rgb(255, 153, 0)',
                pointBackgroundColor: 'rgb(255, 153, 0)',
                pointBorderColor: '#fff',
                pointHoverBackgroundColor: '#fff',
                pointHoverBorderColor: 'rgb(255, 153, 0)'
            }

            // "Fill" the shape this dataset has
            ctx.fillStyle = dataset.areaColor
            ctx.beginPath()
            ctx.moveTo(points[0].x, points[0].y)
            for (let p of points)
                ctx.lineTo(p.x, p.y)
            ctx.lineTo(points[0].x, points[0].y)
            ctx.stroke()
            ctx.fill()
        }

        // Legend at the bottom left corner
        ctx.textAlign = 'left'
        let paddingLeft = 0
        for (let dataset of data) {
            const text = `${dataset.name}: `
            const textWidth = ctx.measureText(text).width
            ctx.fillStyle = 'black'
            ctx.fillText(text, paddingLeft, height - 20)

            ctx.fillStyle = dataset.color
            ctx.beginPath()
            ctx.arc(paddingLeft + textWidth + 5, height - 25, 5, 0, Math.PI * 2, false)
            ctx.fill()

            paddingLeft += textWidth + 15
        }
        ctx.fillStyle = 'black'
        ctx.fillText(`Values relative to respective peak.`, 0, height - 7)
        ]
    }

    let mounted = false
    onMount(() => {
        canvasElement.width = width
        canvasElement.height = height
        ctx = canvasElement.getContext('2d')
        render(ctx, data, width, height)
        mounted = true
    })

    let timeoutId = null
    function sizeChanged() {
        if (!mounted)
            return;

        if (timeoutId != null)
            clearTimeout(timeoutId)

        timeoutId = setTimeout(() => {
            timeoutId = null

            canvasElement.width = width
            canvasElement.height = height
            ctx = canvasElement.getContext('2d')
            render(ctx, data, width, height)
        }, 250)
    // No custom defined options but keep for clarity
    const options = {
        maintainAspectRatio: false,
        animation: false
    }

    $: sizeChanged(width, height)
</script>

<div class="chart-container">
    <Radar {data} {options} width={size} height={size}/>
</div>

<style>
    .chart-container {
        margin: auto;
        position: relative;
    }
</style>
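For reference, the hunk above swaps the hand-drawn canvas radar for svelte-chartjs. chart.js v3+ is tree-shakeable, so every element and scale the radar uses must be registered once; the two datasets then carry the per-metric values already normalized to the respective peak. Below is a condensed plain-JavaScript sketch with hypothetical metric names and values, illustrative only and not part of this commit.

import {
    Chart as ChartJS,
    Title, Tooltip, Legend, Filler,
    PointElement, RadialLinearScale, LineElement
} from 'chart.js'

// Register everything a radar chart needs exactly once
ChartJS.register(Title, Tooltip, Legend, Filler, PointElement, RadialLinearScale, LineElement)

// Hypothetical metric labels and values already normalized to the respective peak (0..1)
const labels = ['flops_any', 'mem_bw', 'mem_used']
const data = {
    labels,
    datasets: [
        {   // fill: 1 fills towards the dataset at index 1, as in the component above
            label: 'Max',
            data: [0.9, 0.7, 0.5],
            fill: 1,
            backgroundColor: 'rgba(0, 102, 255, 0.25)',
            borderColor: 'rgb(0, 102, 255)'
        },
        {
            label: 'Avg',
            data: [0.6, 0.5, 0.3],
            fill: true,
            backgroundColor: 'rgba(255, 153, 0, 0.25)',
            borderColor: 'rgb(255, 153, 0)'
        }
    ]
}
// Rendered via <Radar {data} options={{ maintainAspectRatio: false, animation: false }}/>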
@ -6,11 +6,15 @@ const power = [1, 1e3, 1e6, 1e9, 1e12, 1e15, 1e18, 1e21]
const prefix = ['', 'K', 'M', 'G', 'T', 'P', 'E']

export function formatNumber(x) {
    for (let i = 0; i < prefix.length; i++)
        if (power[i] <= x && x < power[i+1])
            return `${Math.round((x / power[i]) * 100) / 100} ${prefix[i]}`
    if ( isNaN(x) ) {
        return x // Return if String , used in Histograms
    } else {
        for (let i = 0; i < prefix.length; i++)
            if (power[i] <= x && x < power[i+1])
                return `${Math.round((x / power[i]) * 100) / 100} ${prefix[i]}`

        return Math.abs(x) >= 1000 ? x.toExponential() : x.toString()
        return Math.abs(x) >= 1000 ? x.toExponential() : x.toString()
    }
}

export function scaleNumbers(x, y , p = '') {
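For reference, the extended formatNumber keeps the prefix scaling for numeric input and now passes strings (histogram categories) through unchanged. Illustrative calls with their expected results, assuming the helper is imported from units.js exactly as the following hunk does:

import { formatNumber } from './units.js'

// Numeric input is scaled to the matching prefix and rounded to two decimals
formatNumber(1234)      // -> '1.23 K'
formatNumber(2500000)   // -> '2.5 M'
formatNumber(0.5)       // -> '0.5'   (no prefix bracket matches, falls back to toString())
// String input is detected via isNaN() and returned unchanged, as used in histograms
formatNumber('other')   // -> 'other'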
@ -6,6 +6,7 @@ import {
} from "@urql/svelte";
import { setContext, getContext, hasContext, onDestroy, tick } from "svelte";
import { readable } from "svelte/store";
import { formatNumber } from './units.js'

/*
 * Call this function only at component initialization time!
@ -313,3 +314,52 @@ export function checkMetricDisabled(m, c, s) { //[m]etric, [c]luster, [s]ubclust
    }
    return false;
}

export function convert2uplot(canvasData) {
    // initial use: Canvas Histogram Data to Uplot
    let uplotData = [[],[]] // [X, Y1, Y2, ...]
    canvasData.forEach( pair => {
        uplotData[0].push(pair.value)
        uplotData[1].push(pair.count)
    })
    return uplotData
}

export function binsFromFootprint(weights, scope, values, numBins) {
    let min = 0, max = 0
    if (values.length != 0) {
        for (let x of values) {
            min = Math.min(min, x)
            max = Math.max(max, x)
        }
        max += 1 // So that we have an exclusive range.
    }

    if (numBins == null || numBins < 3)
        numBins = 3

    let scopeWeights
    switch (scope) {
        case 'core':
            scopeWeights = weights.coreHours
            break
        case 'accelerator':
            scopeWeights = weights.accHours
            break
        default: // every other scope: use 'node'
            scopeWeights = weights.nodeHours
    }

    const rawBins = new Array(numBins).fill(0)
    for (let i = 0; i < values.length; i++)
        rawBins[Math.floor(((values[i] - min) / (max - min)) * numBins)] += scopeWeights ? scopeWeights[i] : 1

    const bins = rawBins.map((count, idx) => ({
        value: Math.floor(min + ((idx + 1) / numBins) * (max - min)),
        count: count
    }))

    return {
        bins: bins
    }
}
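For reference, convert2uplot transposes the row-oriented histogram pairs into the column-oriented arrays uPlot expects, and binsFromFootprint weights each bin by node, core, or accelerator hours depending on the scope. A short illustrative call for the histogram conversion; the module path is assumed and the snippet is not part of this commit.

import { convert2uplot } from './utils.js'   // assumed module path

// Hypothetical histogram data in the old row-oriented shape: one { value, count } pair per bin
const canvasData = [
    { value: 0, count: 4 },
    { value: 1, count: 7 },
    { value: 2, count: 2 }
]

// uPlot wants column-oriented arrays: [x-values, y-values]
convert2uplot(canvasData)   // -> [[0, 1, 2], [4, 7, 2]]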
@ -11,7 +11,6 @@ import (
    "net/http"
    "strings"

    "github.com/ClusterCockpit/cc-backend/internal/auth"
    "github.com/ClusterCockpit/cc-backend/internal/config"
    "github.com/ClusterCockpit/cc-backend/internal/util"
    "github.com/ClusterCockpit/cc-backend/pkg/log"
@ -92,8 +91,8 @@ type Page struct {
    Title         string                 // Page title
    MsgType       string                 // For generic use in message boxes
    Message       string                 // For generic use in message boxes
    User          auth.User              // Information about the currently logged in user (Full User Info)
    Roles         map[string]auth.Role   // Available roles for frontend render checks
    User          schema.User            // Information about the currently logged in user (Full User Info)
    Roles         map[string]schema.Role // Available roles for frontend render checks
    Build         Build                  // Latest information about the application
    Clusters      []schema.ClusterConfig // List of all clusters for use in the Header
    FilterPresets map[string]interface{} // For pages with the Filter component, this can be used to set initial filters.
@ -101,7 +100,7 @@ type Page struct {
    Config map[string]interface{} // UI settings for the currently logged in user (e.g. line width, ...)
}

func RenderTemplate(rw http.ResponseWriter, r *http.Request, file string, page *Page) {
func RenderTemplate(rw http.ResponseWriter, file string, page *Page) {
    t, ok := templates[file]
    if !ok {
        log.Errorf("WEB/WEB > template '%s' not found", file)