Christoph Kluge
2026-01-21 12:21:56 +01:00
29 changed files with 200 additions and 203 deletions

View File

@@ -34,14 +34,14 @@ const configString = `
"addr": "127.0.0.1:8080",
"short-running-jobs-duration": 300,
"resampling": {
"minimumPoints": 600,
"minimum-points": 600,
"trigger": 300,
"resolutions": [
240,
60
]
},
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
],
"emission-constant": 317

View File

@@ -107,7 +107,7 @@ func initConfiguration() error {
}
func initDatabase() error {
repository.Connect(config.Keys.DBDriver, config.Keys.DB)
repository.Connect(config.Keys.DB)
return nil
}

View File

@@ -5,22 +5,22 @@
"https-key-file": "/etc/letsencrypt/live/url/privkey.pem",
"user": "clustercockpit",
"group": "clustercockpit",
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
],
"short-running-jobs-duration": 300,
"enable-job-taggers": true,
"resampling": {
"minimumPoints": 600,
"minimum-points": 600,
"trigger": 180,
"resolutions": [
240,
60
]
},
"apiSubjects": {
"subjectJobEvent": "cc.job.event",
"subjectNodeState": "cc.node.state"
"api-subjects": {
"subject-job-event": "cc.job.event",
"subject-node-state": "cc.node.state"
}
},
"nats": {
@@ -42,8 +42,8 @@
"kind": "s3",
"endpoint": "http://x.x.x.x",
"bucket": "jobarchive",
"accessKey": "xx",
"secretKey": "xx",
"access-key": "xx",
"secret-key": "xx",
"retention": {
"policy": "move",
"age": 365,

View File

@@ -1,38 +1,38 @@
{
"jobList": {
"usePaging": false,
"showFootprint":false
"job-list": {
"use-paging": false,
"show-footprint":false
},
"jobView": {
"showPolarPlot": true,
"showFootprint": true,
"showRoofline": true,
"showStatTable": true
"job-view": {
"show-polar-plot": true,
"show-footprint": true,
"show-roofline": true,
"show-stat-table": true
},
"metricConfig": {
"jobListMetrics": ["mem_bw", "flops_dp"],
"jobViewPlotMetrics": ["mem_bw", "flops_dp"],
"jobViewTableMetrics": ["mem_bw", "flops_dp"],
"metric-config": {
"job-list-metrics": ["mem_bw", "flops_dp"],
"job-view-plot-metrics": ["mem_bw", "flops_dp"],
"job-view-table-metrics": ["mem_bw", "flops_dp"],
"clusters": [
{
"name": "test",
"subClusters": [
"sub-clusters": [
{
"name": "one",
"jobListMetrics": ["mem_used", "flops_sp"]
"job-list-metrics": ["mem_used", "flops_sp"]
}
]
}
]
},
"nodeList": {
"usePaging": true
"node-list": {
"use-paging": true
},
"plotConfiguration": {
"plotsPerRow": 3,
"colorBackground": true,
"lineWidth": 3,
"colorScheme": [
"plot-configuration": {
"plots-per-row": 3,
"color-background": true,
"line-width": 3,
"color-scheme": [
"#00bfff",
"#0000ff",
"#ff00ff",

View File

@@ -42,7 +42,7 @@ func setup(t *testing.T) *api.RestAPI {
"main": {
"addr": "0.0.0.0:8080",
"validate": false,
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
]
},
@@ -152,7 +152,7 @@ func setup(t *testing.T) *api.RestAPI {
}
archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)
repository.Connect("sqlite3", dbfilepath)
repository.Connect(dbfilepath)
if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
t.Fatal(err)

View File

@@ -36,7 +36,7 @@ func setupNatsTest(t *testing.T) *NatsAPI {
"main": {
"addr": "0.0.0.0:8080",
"validate": false,
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
]
},
@@ -146,7 +146,7 @@ func setupNatsTest(t *testing.T) *NatsAPI {
}
archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)
repository.Connect("sqlite3", dbfilepath)
repository.Connect(dbfilepath)
if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
t.Fatal(err)

View File

@@ -25,20 +25,20 @@ type JWTAuthConfig struct {
MaxAge string `json:"max-age"`
// Specifies which cookie should be checked for a JWT token (if no authorization header is present)
CookieName string `json:"cookieName"`
CookieName string `json:"cookie-name"`
// Deny login for users not in database (but defined in JWT).
// Ignore user roles defined in JWTs ('roles' claim), get them from db.
ValidateUser bool `json:"validateUser"`
ValidateUser bool `json:"validate-user"`
// Specifies which issuer should be accepted when validating external JWTs ('iss' claim)
TrustedIssuer string `json:"trustedIssuer"`
TrustedIssuer string `json:"trusted-issuer"`
// Should a non-existent user be added to the DB based on the information in the token
SyncUserOnLogin bool `json:"syncUserOnLogin"`
SyncUserOnLogin bool `json:"sync-user-on-login"`
// Should an existing user be updated in the DB based on the information in the token
UpdateUserOnLogin bool `json:"updateUserOnLogin"`
UpdateUserOnLogin bool `json:"update-user-on-login"`
}
type JWTAuthenticator struct {
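
With the struct tags switched to kebab-case, JSON handed to JWTAuthConfig must use the new spellings; encoding/json silently ignores unknown keys, so an old camelCase config would simply leave these fields at their zero values. A minimal, self-contained sketch (the values are invented; only the key names come from the tags above):

package main

import (
    "encoding/json"
    "fmt"
)

// Mirrors the renamed JWTAuthConfig tags from the hunk above.
type JWTAuthConfig struct {
    MaxAge            string `json:"max-age"`
    CookieName        string `json:"cookie-name"`
    ValidateUser      bool   `json:"validate-user"`
    TrustedIssuer     string `json:"trusted-issuer"`
    SyncUserOnLogin   bool   `json:"sync-user-on-login"`
    UpdateUserOnLogin bool   `json:"update-user-on-login"`
}

func main() {
    raw := []byte(`{
        "max-age": "168h",
        "cookie-name": "cc-jwt",
        "validate-user": true,
        "trusted-issuer": "https://login.example.com",
        "sync-user-on-login": true
    }`)
    var cfg JWTAuthConfig
    if err := json.Unmarshal(raw, &cfg); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", cfg) // UpdateUserOnLogin stays false: absent keys keep their zero values
}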

View File

@@ -20,16 +20,16 @@ import (
type LdapConfig struct {
URL string `json:"url"`
UserBase string `json:"user_base"`
SearchDN string `json:"search_dn"`
UserBind string `json:"user_bind"`
UserFilter string `json:"user_filter"`
UserAttr string `json:"username_attr"`
SyncInterval string `json:"sync_interval"` // Parsed using time.ParseDuration.
SyncDelOldUsers bool `json:"sync_del_old_users"`
UserBase string `json:"user-base"`
SearchDN string `json:"search-dn"`
UserBind string `json:"user-bind"`
UserFilter string `json:"user-filter"`
UserAttr string `json:"username-attr"`
SyncInterval string `json:"sync-interval"` // Parsed using time.ParseDuration.
SyncDelOldUsers bool `json:"sync-del-old-users"`
// Should a non-existent user be added to the DB if the user exists in the LDAP directory
SyncUserOnLogin bool `json:"syncUserOnLogin"`
SyncUserOnLogin bool `json:"sync-user-on-login"`
}
type LdapAuthenticator struct {

View File

@@ -24,8 +24,8 @@ import (
type OpenIDConfig struct {
Provider string `json:"provider"`
SyncUserOnLogin bool `json:"syncUserOnLogin"`
UpdateUserOnLogin bool `json:"updateUserOnLogin"`
SyncUserOnLogin bool `json:"sync-user-on-login"`
UpdateUserOnLogin bool `json:"update-user-on-login"`
}
type OIDC struct {
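
The same renaming applies on the JSON side; a sketch of an oidc block in the new spelling (the provider URL is a placeholder, the key names follow the renamed tags):

// Hypothetical fragment; key names follow the renamed OpenIDConfig tags.
const oidcExample = `{
    "oidc": {
        "provider": "https://auth.example.com/realms/hpc",
        "sync-user-on-login": true,
        "update-user-on-login": false
    }
}`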

View File

@@ -15,37 +15,44 @@ var configSchema = `
"description": "Configure how long a token is valid. As string parsable by time.ParseDuration()",
"type": "string"
},
"cookieName": {
"cookie-name": {
"description": "Cookie that should be checked for a JWT token.",
"type": "string"
},
"validateUser": {
"validate-user": {
"description": "Deny login for users not in database (but defined in JWT). Overwrite roles in JWT with database roles.",
"type": "boolean"
},
"trustedIssuer": {
"trusted-issuer": {
"description": "Issuer that should be accepted when validating external JWTs ",
"type": "string"
},
"syncUserOnLogin": {
"sync-user-on-login": {
"description": "Add non-existent user to DB at login attempt with values provided in JWT.",
"type": "boolean"
},
"update-user-on-login": {
"description": "Should an existent user attributes in the DB be updated at login attempt with values provided in JWT.",
"type": "boolean"
}
},
"required": ["max-age"]
},
"oidc": {
"type": "object",
"properties": {
"provider": {
"description": "",
"description": "OpenID Connect provider URL.",
"type": "string"
},
"syncUserOnLogin": {
"description": "",
"sync-user-on-login": {
"description": "Add non-existent user to DB at login attempt with values provided.",
"type": "boolean"
},
"updateUserOnLogin": {
"description": "",
"update-user-on-login": {
"description": "Should an existent user attributes in the DB be updated at login attempt with values provided.",
"type": "boolean"
}
},
"required": ["provider"]
},
@@ -57,40 +64,40 @@ var configSchema = `
"description": "URL of LDAP directory server.",
"type": "string"
},
"user_base": {
"user-base": {
"description": "Base DN of user tree root.",
"type": "string"
},
"search_dn": {
"search-dn": {
"description": "DN for authenticating LDAP admin account with general read rights.",
"type": "string"
},
"user_bind": {
"user-bind": {
"description": "Expression used to authenticate users via LDAP bind. Must contain uid={username}.",
"type": "string"
},
"user_filter": {
"user-filter": {
"description": "Filter to extract users for syncing.",
"type": "string"
},
"username_attr": {
"username-attr": {
"description": "Attribute with full username. Default: gecos",
"type": "string"
},
"sync_interval": {
"sync-interval": {
"description": "Interval used for syncing local user table with LDAP directory. Parsed using time.ParseDuration.",
"type": "string"
},
"sync_del_old_users": {
"sync-del-old-users": {
"description": "Delete obsolete users in database.",
"type": "boolean"
},
"syncUserOnLogin": {
"sync-user-on-login": {
"description": "Add non-existent user to DB at login attempt if user exists in Ldap directory",
"type": "boolean"
}
},
"required": ["url", "user_base", "search_dn", "user_bind", "user_filter"]
"required": ["url", "user-base", "search-dn", "user-bind", "user-filter"]
},
"required": ["jwts"]
}`
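
Since the required list now uses the kebab-case names too, a valid ldap block looks like the sketch below; all values are placeholders, while the key names and the required set come from the schema above:

// Hypothetical fragment satisfying required ["url", "user-base", "search-dn", "user-bind", "user-filter"].
const ldapExample = `{
    "ldap": {
        "url": "ldaps://ldap.example.com",
        "user-base": "ou=people,dc=example,dc=com",
        "search-dn": "cn=admin,dc=example,dc=com",
        "user-bind": "uid={username},ou=people,dc=example,dc=com",
        "user-filter": "(&(objectClass=posixAccount))",
        "sync-interval": "24h",
        "sync-user-on-login": true
    }
}`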

View File

@@ -20,9 +20,9 @@ type ProgramConfig struct {
Addr string `json:"addr"`
// Addresses from which secured admin API endpoints can be reached; can be the wildcard "*"
APIAllowedIPs []string `json:"apiAllowedIPs"`
APIAllowedIPs []string `json:"api-allowed-ips"`
APISubjects *NATSConfig `json:"apiSubjects"`
APISubjects *NATSConfig `json:"api-subjects"`
// Drop root permissions once .env has been read and the port has been taken.
User string `json:"user"`
@@ -37,9 +37,6 @@ type ProgramConfig struct {
EmbedStaticFiles bool `json:"embed-static-files"`
StaticFiles string `json:"static-files"`
// Database driver - only 'sqlite3' is supported
DBDriver string `json:"db-driver"`
// Path to SQLite database file
DB string `json:"db"`
@@ -78,7 +75,7 @@ type ProgramConfig struct {
type ResampleConfig struct {
// Minimum number of points to trigger resampling of data
MinimumPoints int `json:"minimumPoints"`
MinimumPoints int `json:"minimum-points"`
// Array of resampling target resolutions, in seconds; Example: [600,300,60]
Resolutions []int `json:"resolutions"`
// Trigger next zoom level at less than this many visible datapoints
@@ -86,8 +83,8 @@ type ResampleConfig struct {
}
type NATSConfig struct {
SubjectJobEvent string `json:"subjectJobEvent"`
SubjectNodeState string `json:"subjectNodeState"`
SubjectJobEvent string `json:"subject-job-event"`
SubjectNodeState string `json:"subject-node-state"`
}
type IntRange struct {
@@ -103,17 +100,14 @@ type TimeRange struct {
type FilterRanges struct {
Duration *IntRange `json:"duration"`
NumNodes *IntRange `json:"numNodes"`
StartTime *TimeRange `json:"startTime"`
NumNodes *IntRange `json:"num-nodes"`
StartTime *TimeRange `json:"start-time"`
}
var Keys ProgramConfig = ProgramConfig{
Addr: "localhost:8080",
DisableAuthentication: false,
EmbedStaticFiles: true,
DBDriver: "sqlite3",
DB: "./var/job.db",
Validate: false,
SessionMaxAge: "168h",
StopJobsExceedingWalltime: 0,
ShortRunningJobsDuration: 5 * 60,
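
Taken together, a minimal main section in the new spelling looks like the sketch below; "db-driver" is gone entirely (only the SQLite path in "db" remains) and every camelCase key is replaced. Values are illustrative:

// Hypothetical snippet exercising the renamed ProgramConfig tags.
const mainExample = `{
    "addr": "localhost:8080",
    "api-allowed-ips": ["*"],
    "api-subjects": {
        "subject-job-event": "cc.job.event",
        "subject-node-state": "cc.node.state"
    },
    "db": "./var/job.db",
    "resampling": {
        "minimum-points": 600,
        "trigger": 300,
        "resolutions": [240, 60]
    }
}`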

View File

@@ -15,7 +15,7 @@ import (
type DefaultMetricsCluster struct {
Name string `json:"name"`
DefaultMetrics string `json:"default_metrics"`
DefaultMetrics string `json:"default-metrics"`
}
type DefaultMetricsConfig struct {

View File

@@ -13,7 +13,7 @@ var configSchema = `
"description": "Address where the http (or https) server will listen on (for example: 'localhost:80').",
"type": "string"
},
"apiAllowedIPs": {
"api-allowed-ips": {
"description": "Addresses from which secured API endpoints can be reached",
"type": "array",
"items": {
@@ -98,7 +98,7 @@ var configSchema = `
"description": "Enable dynamic zoom in frontend metric plots.",
"type": "object",
"properties": {
"minimumPoints": {
"minimum-points": {
"description": "Minimum points to trigger resampling of time-series data.",
"type": "integer"
},
@@ -116,20 +116,20 @@ var configSchema = `
},
"required": ["trigger", "resolutions"]
},
"apiSubjects": {
"api-subjects": {
"description": "NATS subjects configuration for subscribing to job and node events.",
"type": "object",
"properties": {
"subjectJobEvent": {
"subject-job-event": {
"description": "NATS subject for job events (start_job, stop_job)",
"type": "string"
},
"subjectNodeState": {
"subject-node-state": {
"description": "NATS subject for node state updates",
"type": "string"
}
},
"required": ["subjectJobEvent", "subjectNodeState"]
"required": ["subject-job-event", "subject-node-state"]
}
}
}`

View File

@@ -50,7 +50,7 @@ func setup(t *testing.T) *repository.JobRepository {
"main": {
"addr": "0.0.0.0:8080",
"validate": false,
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
]},
"archive": {
@@ -104,7 +104,7 @@ func setup(t *testing.T) *repository.JobRepository {
t.Fatal(err)
}
repository.Connect("sqlite3", dbfilepath)
repository.Connect(dbfilepath)
return repository.GetJobRepository()
}

View File

@@ -68,7 +68,7 @@ const configSchema = `{
"type": "integer"
},
"nats-subscriptions": {
"description": "Array of various subscriptions. Allows to subscibe to different subjects and publishers.",
"description": "Array of various subscriptions. Allows to subscribe to different subjects and publishers.",
"type": "array",
"items": {
"type": "object",

View File

@@ -51,14 +51,10 @@ func setupSqlite(db *sql.DB) error {
return nil
}
func Connect(driver string, db string) {
func Connect(db string) {
var err error
var dbHandle *sqlx.DB
if driver != "sqlite3" {
cclog.Abortf("Unsupported database driver '%s'. Only 'sqlite3' is supported.\n", driver)
}
dbConnOnce.Do(func() {
opts := DatabaseOptions{
URL: db,
@@ -100,7 +96,7 @@ func Connect(driver string, db string) {
dbHandle.SetConnMaxLifetime(opts.ConnectionMaxLifetime)
dbHandle.SetConnMaxIdleTime(opts.ConnectionMaxIdleTime)
dbConnInstance = &DBConnection{DB: dbHandle, Driver: driver}
dbConnInstance = &DBConnection{DB: dbHandle}
err = checkDBVersion(dbHandle.DB)
if err != nil {
cclog.Abortf("DB Connection: Failed DB version check.\nError: %s\n", err.Error())
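
Dropping the driver parameter removes an argument that could only ever be "sqlite3" (anything else aborted immediately). Call sites now pass the database path alone; a sketch of the resulting pattern, mirroring the test setups in this commit (openJobDB is a made-up helper, and the import path is assumed):

import (
    "log"

    "github.com/ClusterCockpit/cc-backend/internal/repository" // assumed module path
)

// openJobDB is a hypothetical helper mirroring the test setups in this commit.
func openJobDB(dbfile string) *repository.JobRepository {
    // Bring the schema up to date first, then open the connection;
    // sqlite3 is implied, since it is the only supported driver.
    if err := repository.MigrateDB(dbfile); err != nil {
        log.Fatal(err)
    }
    repository.Connect(dbfile)
    return repository.GetJobRepository()
}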

View File

@@ -26,7 +26,7 @@ func nodeTestSetup(t *testing.T) {
"main": {
"addr": "0.0.0.0:8080",
"validate": false,
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
]
},
@@ -139,7 +139,7 @@ func nodeTestSetup(t *testing.T) {
}
archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)
Connect("sqlite3", dbfilepath)
Connect(dbfilepath)
if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
t.Fatal(err)

View File

@@ -151,7 +151,7 @@ func setup(tb testing.TB) *JobRepository {
dbfile := "testdata/job.db"
err := MigrateDB(dbfile)
noErr(tb, err)
Connect("sqlite3", dbfile)
Connect(dbfile)
return GetJobRepository()
}

View File

@@ -20,7 +20,7 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
const testconfig = `{
"main": {
"addr": "0.0.0.0:8080",
"apiAllowedIPs": [
"api-allowed-ips": [
"*"
]
},
@@ -36,7 +36,7 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
if err != nil {
t.Fatal(err)
}
Connect("sqlite3", dbfilepath)
Connect(dbfilepath)
tmpdir := t.TempDir()
cfgFilePath := filepath.Join(tmpdir, "config.json")

View File

@@ -19,7 +19,7 @@ func setup(tb testing.TB) *repository.JobRepository {
dbfile := "../repository/testdata/job.db"
err := repository.MigrateDB(dbfile)
noErr(tb, err)
repository.Connect("sqlite3", dbfile)
repository.Connect(dbfile)
return repository.GetJobRepository()
}

View File

@@ -18,7 +18,7 @@ var configSchema = `
"description": "Path to job archive for file backend",
"type": "string"
},
"dbPath": {
"db-path": {
"description": "Path to SQLite database file for sqlite backend",
"type": "string"
},
@@ -26,11 +26,11 @@ var configSchema = `
"description": "S3 endpoint URL (for S3-compatible services like MinIO)",
"type": "string"
},
"accessKey": {
"access-key": {
"description": "S3 access key ID",
"type": "string"
},
"secretKey": {
"secret-key": {
"description": "S3 secret access key",
"type": "string"
},
@@ -42,7 +42,7 @@ var configSchema = `
"description": "AWS region for S3 bucket",
"type": "string"
},
"usePathStyle": {
"use-path-style": {
"description": "Use path-style S3 URLs (required for MinIO and some S3-compatible services)",
"type": "boolean"
},
@@ -59,7 +59,7 @@ var configSchema = `
"type": "string",
"enum": ["none", "delete", "move"]
},
"includeDB": {
"include-db": {
"description": "Also remove jobs from database",
"type": "boolean"
},

View File

@@ -36,11 +36,11 @@ import (
// S3ArchiveConfig holds the configuration for the S3 archive backend.
type S3ArchiveConfig struct {
Endpoint string `json:"endpoint"` // S3 endpoint URL (optional, for MinIO/localstack)
AccessKey string `json:"accessKey"` // AWS access key ID
SecretKey string `json:"secretKey"` // AWS secret access key
AccessKey string `json:"access-key"` // AWS access key ID
SecretKey string `json:"secret-key"` // AWS secret access key
Bucket string `json:"bucket"` // S3 bucket name
Region string `json:"region"` // AWS region
UsePathStyle bool `json:"usePathStyle"` // Use path-style URLs (required for MinIO)
UsePathStyle bool `json:"use-path-style"` // Use path-style URLs (required for MinIO)
}
// S3Archive implements ArchiveBackend using AWS S3 or S3-compatible object storage.

View File

@@ -241,11 +241,11 @@ func TestGetS3Directory(t *testing.T) {
func TestS3ArchiveConfigParsing(t *testing.T) {
rawConfig := json.RawMessage(`{
"endpoint": "http://localhost:9000",
"accessKey": "minioadmin",
"secretKey": "minioadmin",
"access-key": "minioadmin",
"secret-key": "minioadmin",
"bucket": "test-bucket",
"region": "us-east-1",
"usePathStyle": true
"use-path-style": true
}`)
var cfg S3ArchiveConfig

View File

@@ -29,7 +29,7 @@ import (
// SqliteArchiveConfig holds the configuration for the SQLite archive backend.
type SqliteArchiveConfig struct {
DBPath string `json:"dbPath"` // Path to SQLite database file
DBPath string `json:"db-path"` // Path to SQLite database file
}
// SqliteArchive implements ArchiveBackend using a SQLite database with BLOB storage.

View File

@@ -22,7 +22,7 @@ func TestSqliteInitEmptyPath(t *testing.T) {
func TestSqliteInitInvalidConfig(t *testing.T) {
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`"dbPath":"/tmp/test.db"`))
_, err := sa.Init(json.RawMessage(`"db-path":"/tmp/test.db"`))
if err == nil {
t.Fatal("expected error for invalid config")
}
@@ -33,7 +33,7 @@ func TestSqliteInit(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
version, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
version, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -51,7 +51,7 @@ func TestSqliteStoreAndLoadJobMeta(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -97,7 +97,7 @@ func TestSqliteImportJob(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -114,7 +114,7 @@ func TestSqliteGetClusters(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -141,7 +141,7 @@ func TestSqliteGetClusters(t *testing.T) {
// Reinitialize to refresh cluster list
sa.db.Close()
_, err = sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err = sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("reinit failed: %v", err)
}
@@ -158,7 +158,7 @@ func TestSqliteCleanUp(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -193,7 +193,7 @@ func TestSqliteClean(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -237,7 +237,7 @@ func TestSqliteIter(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -276,7 +276,7 @@ func TestSqliteCompress(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}
@@ -299,7 +299,7 @@ func TestSqliteCompress(t *testing.T) {
}
func TestSqliteConfigParsing(t *testing.T) {
rawConfig := json.RawMessage(`{"dbPath": "/tmp/test.db"}`)
rawConfig := json.RawMessage(`{"db-path": "/tmp/test.db"}`)
var cfg SqliteArchiveConfig
err := json.Unmarshal(rawConfig, &cfg)
@@ -317,7 +317,7 @@ func TestSqliteIterChunking(t *testing.T) {
defer os.Remove(tmpfile)
var sa SqliteArchive
_, err := sa.Init(json.RawMessage(`{"dbPath":"` + tmpfile + `"}`))
_, err := sa.Init(json.RawMessage(`{"db-path":"` + tmpfile + `"}`))
if err != nil {
t.Fatalf("init failed: %v", err)
}

View File

@@ -41,7 +41,7 @@ func TestImportFileToSqlite(t *testing.T) {
}
// Initialize destination backend (sqlite)
dstConfig := fmt.Sprintf(`{"kind":"sqlite","dbPath":"%s"}`, dstDb)
dstConfig := fmt.Sprintf(`{"kind":"sqlite","db-path":"%s"}`, dstDb)
dstBackend, err := archive.InitBackend(json.RawMessage(dstConfig))
if err != nil {
t.Fatalf("Failed to initialize destination backend: %s", err.Error())
@@ -176,7 +176,7 @@ func TestImportDataIntegrity(t *testing.T) {
t.Fatalf("Failed to initialize source backend: %s", err.Error())
}
dstConfig := fmt.Sprintf(`{"kind":"sqlite","dbPath":"%s"}`, dstDb)
dstConfig := fmt.Sprintf(`{"kind":"sqlite","db-path":"%s"}`, dstDb)
dstBackend, err := archive.InitBackend(json.RawMessage(dstConfig))
if err != nil {
t.Fatalf("Failed to initialize destination backend: %s", err.Error())
@@ -270,7 +270,7 @@ func TestImportEmptyArchive(t *testing.T) {
t.Fatalf("Failed to initialize source backend: %s", err.Error())
}
dstConfig := fmt.Sprintf(`{"kind":"sqlite","dbPath":"%s"}`, dstDb)
dstConfig := fmt.Sprintf(`{"kind":"sqlite","db-path":"%s"}`, dstDb)
dstBackend, err := archive.InitBackend(json.RawMessage(dstConfig))
if err != nil {
t.Fatalf("Failed to initialize destination backend: %s", err.Error())
@@ -314,7 +314,7 @@ func TestImportDuplicateJobs(t *testing.T) {
t.Fatalf("Failed to initialize source backend: %s", err.Error())
}
dstConfig := fmt.Sprintf(`{"kind":"sqlite","dbPath":"%s"}`, dstDb)
dstConfig := fmt.Sprintf(`{"kind":"sqlite","db-path":"%s"}`, dstDb)
dstBackend, err := archive.InitBackend(json.RawMessage(dstConfig))
if err != nil {
t.Fatalf("Failed to initialize destination backend: %s", err.Error())

View File

@@ -8,57 +8,57 @@ package web
const configSchema = `{
"type": "object",
"properties": {
"jobList": {
"job-list": {
"description": "Job list defaults. Applies to user- and jobs views.",
"type": "object",
"properties": {
"usePaging": {
"use-paging": {
"description": "If classic paging is used instead of continuous scrolling by default.",
"type": "boolean"
},
"showFootprint": {
"show-footprint": {
"description": "If footprint bars are shown as first column by default.",
"type": "boolean"
}
}
},
"nodeList": {
"node-list": {
"description": "Node list defaults. Applies to node list view.",
"type": "object",
"properties": {
"usePaging": {
"use-paging": {
"description": "If classic paging is used instead of continuous scrolling by default.",
"type": "boolean"
}
}
},
"jobView": {
"job-view": {
"description": "Job view defaults.",
"type": "object",
"properties": {
"showPolarPlot": {
"show-polar-plot": {
"description": "If the job metric footprints polar plot is shown by default.",
"type": "boolean"
},
"showFootprint": {
"show-footprint": {
"description": "If the annotated job metric footprint bars are shown by default.",
"type": "boolean"
},
"showRoofline": {
"show-roofline": {
"description": "If the job roofline plot is shown by default.",
"type": "boolean"
},
"showStatTable": {
"show-stat-table": {
"description": "If the job metric statistics table is shown by default.",
"type": "boolean"
}
}
},
"metricConfig": {
"metric-config": {
"description": "Global initial metric selections for primary views of all clusters.",
"type": "object",
"properties": {
"jobListMetrics": {
"job-list-metrics": {
"description": "Initial metrics shown for new users in job lists (User and jobs view).",
"type": "array",
"items": {
@@ -66,7 +66,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewPlotMetrics": {
"job-view-plot-metrics": {
"description": "Initial metrics shown for new users as job view metric plots.",
"type": "array",
"items": {
@@ -74,7 +74,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewTableMetrics": {
"job-view-table-metrics": {
"description": "Initial metrics shown for new users in job view statistics table.",
"type": "array",
"items": {
@@ -91,7 +91,7 @@ const configSchema = `{
"name": {
"description": "The name of the cluster."
},
"jobListMetrics": {
"job-list-metrics": {
"description": "Initial metrics shown for new users in job lists (User and jobs view) for subcluster.",
"type": "array",
"items": {
@@ -99,7 +99,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewPlotMetrics": {
"job-view-plot-metrics": {
"description": "Initial metrics shown for new users as job view timeplots for subcluster.",
"type": "array",
"items": {
@@ -107,7 +107,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewTableMetrics": {
"job-view-table-metrics": {
"description": "Initial metrics shown for new users in job view statistics table for subcluster.",
"type": "array",
"items": {
@@ -115,7 +115,7 @@ const configSchema = `{
"minItems": 1
}
},
"subClusters": {
"sub-clusters": {
"description": "The array of overrides per subcluster.",
"type": "array",
"items": {
@@ -125,7 +125,7 @@ const configSchema = `{
"description": "The name of the subcluster.",
"type": "string"
},
"jobListMetrics": {
"job-list-metrics": {
"description": "Initial metrics shown for new users in job lists (User and jobs view) for subcluster.",
"type": "array",
"items": {
@@ -133,7 +133,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewPlotMetrics": {
"job-view-plot-metrics": {
"description": "Initial metrics shown for new users as job view timeplots for subcluster.",
"type": "array",
"items": {
@@ -141,7 +141,7 @@ const configSchema = `{
"minItems": 1
}
},
"jobViewTableMetrics": {
"job-view-table-metrics": {
"description": "Initial metrics shown for new users in job view statistics table for subcluster.",
"type": "array",
"items": {
@@ -155,29 +155,29 @@ const configSchema = `{
}
}
},
"required": ["name", "subClusters"],
"required": ["name", "sub-clusters"],
"minItems": 1
}
}
}
},
"plotConfiguration": {
"plot-configuration": {
"description": "Initial settings for plot render options.",
"type": "object",
"properties": {
"colorBackground": {
"color-background": {
"description": "If the metric plot backgrounds are initially colored by threshold limits.",
"type": "boolean"
},
"plotsPerRow": {
"plots-per-row": {
"description": "How many plots are initially rendered in per row. Applies to job, single node, and analysis views.",
"type": "integer"
},
"lineWidth": {
"line-width": {
"description": "Initial thickness of rendered plotlines. Applies to metric plot, job compare plot and roofline.",
"type": "integer"
},
"colorScheme": {
"color-scheme": {
"description": "Initial colorScheme to be used for metric plots.",
"type": "array",
"items": {

View File

@@ -22,56 +22,56 @@ import (
)
type WebConfig struct {
JobList JobListConfig `json:"jobList"`
NodeList NodeListConfig `json:"nodeList"`
JobView JobViewConfig `json:"jobView"`
MetricConfig MetricConfig `json:"metricConfig"`
PlotConfiguration PlotConfiguration `json:"plotConfiguration"`
JobList JobListConfig `json:"job-list"`
NodeList NodeListConfig `json:"node-list"`
JobView JobViewConfig `json:"job-view"`
MetricConfig MetricConfig `json:"metric-config"`
PlotConfiguration PlotConfiguration `json:"plot-configuration"`
}
type JobListConfig struct {
UsePaging bool `json:"usePaging"`
ShowFootprint bool `json:"showFootprint"`
UsePaging bool `json:"use-paging"`
ShowFootprint bool `json:"show-footprint"`
}
type NodeListConfig struct {
UsePaging bool `json:"usePaging"`
UsePaging bool `json:"use-paging"`
}
type JobViewConfig struct {
ShowPolarPlot bool `json:"showPolarPlot"`
ShowFootprint bool `json:"showFootprint"`
ShowRoofline bool `json:"showRoofline"`
ShowStatTable bool `json:"showStatTable"`
ShowPolarPlot bool `json:"show-polar-plot"`
ShowFootprint bool `json:"show-footprint"`
ShowRoofline bool `json:"show-roofline"`
ShowStatTable bool `json:"show-stat-table"`
}
type MetricConfig struct {
JobListMetrics []string `json:"jobListMetrics"`
JobViewPlotMetrics []string `json:"jobViewPlotMetrics"`
JobViewTableMetrics []string `json:"jobViewTableMetrics"`
JobListMetrics []string `json:"job-list-metrics"`
JobViewPlotMetrics []string `json:"job-view-plot-metrics"`
JobViewTableMetrics []string `json:"job-view-table-metrics"`
Clusters []ClusterConfig `json:"clusters"`
}
type ClusterConfig struct {
Name string `json:"name"`
JobListMetrics []string `json:"jobListMetrics"`
JobViewPlotMetrics []string `json:"jobViewPlotMetrics"`
JobViewTableMetrics []string `json:"jobViewTableMetrics"`
SubClusters []SubClusterConfig `json:"subClusters"`
JobListMetrics []string `json:"job-list-metrics"`
JobViewPlotMetrics []string `json:"job-view-plot-metrics"`
JobViewTableMetrics []string `json:"job-view-table-metrics"`
SubClusters []SubClusterConfig `json:"sub-clusters"`
}
type SubClusterConfig struct {
Name string `json:"name"`
JobListMetrics []string `json:"jobListMetrics"`
JobViewPlotMetrics []string `json:"jobViewPlotMetrics"`
JobViewTableMetrics []string `json:"jobViewTableMetrics"`
JobListMetrics []string `json:"job-list-metrics"`
JobViewPlotMetrics []string `json:"job-view-plot-metrics"`
JobViewTableMetrics []string `json:"job-view-table-metrics"`
}
type PlotConfiguration struct {
ColorBackground bool `json:"colorBackground"`
PlotsPerRow int `json:"plotsPerRow"`
LineWidth int `json:"lineWidth"`
ColorScheme []string `json:"colorScheme"`
ColorBackground bool `json:"color-background"`
PlotsPerRow int `json:"plots-per-row"`
LineWidth int `json:"line-width"`
ColorScheme []string `json:"color-scheme"`
}
var UIDefaults = WebConfig{

View File

@@ -20,38 +20,38 @@ func TestInit(t *testing.T) {
Init(cfg)
if UIDefaultsMap["nodelist_usePaging"] == false {
if UIDefaultsMap["nodeList_usePaging"] == false {
t.Errorf("wrong option\ngot: %v \nwant: true", UIDefaultsMap["NodeList_UsePaging"])
}
}
func TestSimpleDefaults(t *testing.T) {
const s = `{
"joblist": {
"showFootprint": false
"job-list": {
"show-footprint": false
}
}`
Init(json.RawMessage(s))
if UIDefaultsMap["joblist_usePaging"] == true {
if UIDefaultsMap["jobList_usePaging"] == true {
t.Errorf("wrong option\ngot: %v \nwant: false", UIDefaultsMap["NodeList_UsePaging"])
}
}
func TestOverwrite(t *testing.T) {
const s = `{
"metricConfig": {
"jobListMetrics": ["flops_sp", "flops_dp"],
"metric-config": {
"job-list-metrics": ["flops_sp", "flops_dp"],
"clusters": [
{
"name": "fritz",
"jobListMetrics": ["flops_any", "mem_bw", "load"],
"subClusters": [
"job-list-metrics": ["flops_any", "mem_bw", "load"],
"sub-clusters": [
{
"name": "icelake",
"jobListMetrics": ["flops_any", "mem_bw", "power", "load"],
"jobViewPlotMetrics": ["load"]
"job-list-metrics": ["flops_any", "mem_bw", "power", "load"],
"job-view-plot-metrics": ["load"]
}
]
}