Mirror of https://github.com/ClusterCockpit/cc-backend, synced 2026-01-28 06:51:45 +01:00
Unify JSON attribute naming to use kebab-case style. Clean up configuration.
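For existing setups this is a breaking change to config.json: every camelCase key shown in the hunks below is replaced by its kebab-case counterpart. As a quick orientation, a hypothetical minimal main section using the new names could look like the sketch below, written as a Go raw-string constant in the style of the repository's test fixtures; the address and wildcard are placeholder values, not part of this diff:

    // Hypothetical config.json excerpt using the kebab-case keys from this commit.
    const exampleMainConfig = `{
        "main": {
            "addr": "localhost:8080",
            "api-allowed-ips": ["*"]
        }
    }`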
@@ -42,7 +42,7 @@ func setup(t *testing.T) *api.RestAPI {
 "main": {
 "addr": "0.0.0.0:8080",
 "validate": false,
-"apiAllowedIPs": [
+"api-allowed-ips": [
 "*"
 ]
 },
@@ -152,7 +152,7 @@ func setup(t *testing.T) *api.RestAPI {
 }
 archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)

-repository.Connect("sqlite3", dbfilepath)
+repository.Connect(dbfilepath)

 if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
 t.Fatal(err)
@@ -36,7 +36,7 @@ func setupNatsTest(t *testing.T) *NatsAPI {
 "main": {
 "addr": "0.0.0.0:8080",
 "validate": false,
-"apiAllowedIPs": [
+"api-allowed-ips": [
 "*"
 ]
 },
@@ -146,7 +146,7 @@ func setupNatsTest(t *testing.T) *NatsAPI {
 }
 archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)

-repository.Connect("sqlite3", dbfilepath)
+repository.Connect(dbfilepath)

 if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
 t.Fatal(err)
@@ -25,20 +25,20 @@ type JWTAuthConfig struct {
 MaxAge string `json:"max-age"`

 // Specifies which cookie should be checked for a JWT token (if no authorization header is present)
-CookieName string `json:"cookieName"`
+CookieName string `json:"cookie-name"`

 // Deny login for users not in database (but defined in JWT).
 // Ignore user roles defined in JWTs ('roles' claim), get them from db.
-ValidateUser bool `json:"validateUser"`
+ValidateUser bool `json:"validate-user"`

 // Specifies which issuer should be accepted when validating external JWTs ('iss' claim)
-TrustedIssuer string `json:"trustedIssuer"`
+TrustedIssuer string `json:"trusted-issuer"`

 // Should an non-existent user be added to the DB based on the information in the token
-SyncUserOnLogin bool `json:"syncUserOnLogin"`
+SyncUserOnLogin bool `json:"sync-user-on-login"`

 // Should an existent user be updated in the DB based on the information in the token
-UpdateUserOnLogin bool `json:"updateUserOnLogin"`
+UpdateUserOnLogin bool `json:"update-user-on-login"`
 }

 type JWTAuthenticator struct {
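For orientation, a hypothetical jwts section written against the renamed tags could look like the following sketch; all values are illustrative, and only max-age is required by the schema later in this diff:

    // Hypothetical "jwts" block using the kebab-case keys introduced above.
    const exampleJWTConfig = `{
        "jwts": {
            "max-age": "2h",
            "cookie-name": "token",
            "validate-user": true,
            "trusted-issuer": "https://issuer.example.com",
            "sync-user-on-login": true,
            "update-user-on-login": false
        }
    }`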
@@ -20,16 +20,16 @@ import (

 type LdapConfig struct {
 URL string `json:"url"`
-UserBase string `json:"user_base"`
-SearchDN string `json:"search_dn"`
-UserBind string `json:"user_bind"`
-UserFilter string `json:"user_filter"`
-UserAttr string `json:"username_attr"`
-SyncInterval string `json:"sync_interval"` // Parsed using time.ParseDuration.
-SyncDelOldUsers bool `json:"sync_del_old_users"`
+UserBase string `json:"user-base"`
+SearchDN string `json:"search-dn"`
+UserBind string `json:"user-bind"`
+UserFilter string `json:"user-filter"`
+UserAttr string `json:"username-attr"`
+SyncInterval string `json:"sync-interval"` // Parsed using time.ParseDuration.
+SyncDelOldUsers bool `json:"sync-del-old-users"`

 // Should an non-existent user be added to the DB if user exists in ldap directory
-SyncUserOnLogin bool `json:"syncUserOnLogin"`
+SyncUserOnLogin bool `json:"sync-user-on-login"`
 }

 type LdapAuthenticator struct {
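Analogously, an ldap section with the new keys might look like the sketch below; the server URL, DNs and filter are placeholders, and the schema further down requires url, user-base, search-dn, user-bind and user-filter:

    // Hypothetical "ldap" block using the kebab-case keys introduced above.
    const exampleLdapConfig = `{
        "ldap": {
            "url": "ldaps://ldap.example.com",
            "user-base": "ou=people,dc=example,dc=com",
            "search-dn": "cn=admin,dc=example,dc=com",
            "user-bind": "uid={username},ou=people,dc=example,dc=com",
            "user-filter": "(&(objectclass=posixAccount))",
            "sync-interval": "24h",
            "sync-user-on-login": true
        }
    }`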
@@ -24,8 +24,8 @@ import (

 type OpenIDConfig struct {
 Provider string `json:"provider"`
-SyncUserOnLogin bool `json:"syncUserOnLogin"`
-UpdateUserOnLogin bool `json:"updateUserOnLogin"`
+SyncUserOnLogin bool `json:"sync-user-on-login"`
+UpdateUserOnLogin bool `json:"update-user-on-login"`
 }

 type OIDC struct {
@@ -15,37 +15,44 @@ var configSchema = `
 "description": "Configure how long a token is valid. As string parsable by time.ParseDuration()",
 "type": "string"
 },
-"cookieName": {
+"cookie-name": {
 "description": "Cookie that should be checked for a JWT token.",
 "type": "string"
 },
-"validateUser": {
+"validate-user": {
 "description": "Deny login for users not in database (but defined in JWT). Overwrite roles in JWT with database roles.",
 "type": "boolean"
 },
-"trustedIssuer": {
+"trusted-issuer": {
 "description": "Issuer that should be accepted when validating external JWTs ",
 "type": "string"
 },
-"syncUserOnLogin": {
+"sync-user-on-login": {
 "description": "Add non-existent user to DB at login attempt with values provided in JWT.",
 "type": "boolean"
+},
+"update-user-on-login": {
+"description": "Should an existent user attributes in the DB be updated at login attempt with values provided in JWT.",
+"type": "boolean"
 }
 },
 "required": ["max-age"]
 },
 "oidc": {
-"provider": {
-"description": "",
-"type": "string"
-},
-"syncUserOnLogin": {
-"description": "",
-"type": "boolean"
-},
-"updateUserOnLogin": {
-"description": "",
-"type": "boolean"
+"type": "object",
+"properties": {
+"provider": {
+"description": "OpenID Connect provider URL.",
+"type": "string"
+},
+"sync-user-on-login": {
+"description": "Add non-existent user to DB at login attempt with values provided.",
+"type": "boolean"
+},
+"update-user-on-login": {
+"description": "Should an existent user attributes in the DB be updated at login attempt with values provided.",
+"type": "boolean"
 }
 },
+"required": ["provider"]
 },
@@ -57,40 +64,40 @@ var configSchema = `
 "description": "URL of LDAP directory server.",
 "type": "string"
 },
-"user_base": {
+"user-base": {
 "description": "Base DN of user tree root.",
 "type": "string"
 },
-"search_dn": {
+"search-dn": {
 "description": "DN for authenticating LDAP admin account with general read rights.",
 "type": "string"
 },
-"user_bind": {
+"user-bind": {
 "description": "Expression used to authenticate users via LDAP bind. Must contain uid={username}.",
 "type": "string"
 },
-"user_filter": {
+"user-filter": {
 "description": "Filter to extract users for syncing.",
 "type": "string"
 },
-"username_attr": {
+"username-attr": {
 "description": "Attribute with full username. Default: gecos",
 "type": "string"
 },
-"sync_interval": {
+"sync-interval": {
 "description": "Interval used for syncing local user table with LDAP directory. Parsed using time.ParseDuration.",
 "type": "string"
 },
-"sync_del_old_users": {
+"sync-del-old-users": {
 "description": "Delete obsolete users in database.",
 "type": "boolean"
 },
-"syncUserOnLogin": {
+"sync-user-on-login": {
 "description": "Add non-existent user to DB at login attempt if user exists in Ldap directory",
 "type": "boolean"
 }
 },
-"required": ["url", "user_base", "search_dn", "user_bind", "user_filter"]
+"required": ["url", "user-base", "search-dn", "user-bind", "user-filter"]
 },
 "required": ["jwts"]
 }`
@@ -20,9 +20,9 @@ type ProgramConfig struct {
 Addr string `json:"addr"`

 // Addresses from which secured admin API endpoints can be reached, can be wildcard "*"
-APIAllowedIPs []string `json:"apiAllowedIPs"`
+APIAllowedIPs []string `json:"api-allowed-ips"`

-APISubjects *NATSConfig `json:"apiSubjects"`
+APISubjects *NATSConfig `json:"api-subjects"`

 // Drop root permissions once .env was read and the port was taken.
 User string `json:"user"`
@@ -37,9 +37,6 @@ type ProgramConfig struct {
 EmbedStaticFiles bool `json:"embed-static-files"`
 StaticFiles string `json:"static-files"`

-// Database driver - only 'sqlite3' is supported
-DBDriver string `json:"db-driver"`
-
 // Path to SQLite database file
 DB string `json:"db"`

@@ -78,7 +75,7 @@ type ProgramConfig struct {

 type ResampleConfig struct {
 // Minimum number of points to trigger resampling of data
-MinimumPoints int `json:"minimumPoints"`
+MinimumPoints int `json:"minimum-points"`
 // Array of resampling target resolutions, in seconds; Example: [600,300,60]
 Resolutions []int `json:"resolutions"`
 // Trigger next zoom level at less than this many visible datapoints
@@ -86,8 +83,8 @@ type ResampleConfig struct {
 }

 type NATSConfig struct {
-SubjectJobEvent string `json:"subjectJobEvent"`
-SubjectNodeState string `json:"subjectNodeState"`
+SubjectJobEvent string `json:"subject-job-event"`
+SubjectNodeState string `json:"subject-node-state"`
 }

 type IntRange struct {
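A small self-contained sketch of how the renamed tags map JSON keys onto struct fields via encoding/json; the NATSConfig type is copied from the hunk above, while the subject values are placeholders and not defaults prescribed by the repository:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // NATSConfig as defined after this commit (struct tags renamed to kebab-case).
    type NATSConfig struct {
        SubjectJobEvent  string `json:"subject-job-event"`
        SubjectNodeState string `json:"subject-node-state"`
    }

    func main() {
        // Placeholder subjects; decoding picks them up via the kebab-case tags.
        raw := []byte(`{"subject-job-event": "job-events", "subject-node-state": "node-state"}`)
        var cfg NATSConfig
        if err := json.Unmarshal(raw, &cfg); err != nil {
            panic(err)
        }
        fmt.Println(cfg.SubjectJobEvent, cfg.SubjectNodeState)
    }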
@@ -103,17 +100,14 @@ type TimeRange struct {

 type FilterRanges struct {
 Duration *IntRange `json:"duration"`
-NumNodes *IntRange `json:"numNodes"`
-StartTime *TimeRange `json:"startTime"`
+NumNodes *IntRange `json:"num-nodes"`
+StartTime *TimeRange `json:"start-time"`
 }

 var Keys ProgramConfig = ProgramConfig{
 Addr: "localhost:8080",
 DisableAuthentication: false,
 EmbedStaticFiles: true,
-DBDriver: "sqlite3",
 DB: "./var/job.db",
 Validate: false,
 SessionMaxAge: "168h",
 StopJobsExceedingWalltime: 0,
 ShortRunningJobsDuration: 5 * 60,
@@ -15,7 +15,7 @@ import (

 type DefaultMetricsCluster struct {
 Name string `json:"name"`
-DefaultMetrics string `json:"default_metrics"`
+DefaultMetrics string `json:"default-metrics"`
 }

 type DefaultMetricsConfig struct {
@@ -13,7 +13,7 @@ var configSchema = `
 "description": "Address where the http (or https) server will listen on (for example: 'localhost:80').",
 "type": "string"
 },
-"apiAllowedIPs": {
+"api-allowed-ips": {
 "description": "Addresses from which secured API endpoints can be reached",
 "type": "array",
 "items": {
@@ -98,7 +98,7 @@ var configSchema = `
 "description": "Enable dynamic zoom in frontend metric plots.",
 "type": "object",
 "properties": {
-"minimumPoints": {
+"minimum-points": {
 "description": "Minimum points to trigger resampling of time-series data.",
 "type": "integer"
 },
@@ -116,20 +116,20 @@ var configSchema = `
 },
 "required": ["trigger", "resolutions"]
 },
-"apiSubjects": {
+"api-subjects": {
 "description": "NATS subjects configuration for subscribing to job and node events.",
 "type": "object",
 "properties": {
-"subjectJobEvent": {
+"subject-job-event": {
 "description": "NATS subject for job events (start_job, stop_job)",
 "type": "string"
 },
-"subjectNodeState": {
+"subject-node-state": {
 "description": "NATS subject for node state updates",
 "type": "string"
 }
 },
-"required": ["subjectJobEvent", "subjectNodeState"]
+"required": ["subject-job-event", "subject-node-state"]
 }
 }
 }`
@@ -50,7 +50,7 @@ func setup(t *testing.T) *repository.JobRepository {
 "main": {
 "addr": "0.0.0.0:8080",
 "validate": false,
-"apiAllowedIPs": [
+"api-allowed-ips": [
 "*"
 ]},
 "archive": {
@@ -104,7 +104,7 @@ func setup(t *testing.T) *repository.JobRepository {
 t.Fatal(err)
 }

-repository.Connect("sqlite3", dbfilepath)
+repository.Connect(dbfilepath)
 return repository.GetJobRepository()
 }

@@ -68,7 +68,7 @@ const configSchema = `{
 "type": "integer"
 },
 "nats-subscriptions": {
-"description": "Array of various subscriptions. Allows to subscibe to different subjects and publishers.",
+"description": "Array of various subscriptions. Allows to subscribe to different subjects and publishers.",
 "type": "array",
 "items": {
 "type": "object",
@@ -51,14 +51,10 @@ func setupSqlite(db *sql.DB) error {
 return nil
 }

-func Connect(driver string, db string) {
+func Connect(db string) {
 var err error
 var dbHandle *sqlx.DB

-if driver != "sqlite3" {
-cclog.Abortf("Unsupported database driver '%s'. Only 'sqlite3' is supported.\n", driver)
-}
-
 dbConnOnce.Do(func() {
 opts := DatabaseOptions{
 URL: db,
@@ -100,7 +96,7 @@ func Connect(driver string, db string) {
 dbHandle.SetConnMaxLifetime(opts.ConnectionMaxLifetime)
 dbHandle.SetConnMaxIdleTime(opts.ConnectionMaxIdleTime)

-dbConnInstance = &DBConnection{DB: dbHandle, Driver: driver}
+dbConnInstance = &DBConnection{DB: dbHandle}
 err = checkDBVersion(dbHandle.DB)
 if err != nil {
 cclog.Abortf("DB Connection: Failed DB version check.\nError: %s\n", err.Error())
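Callers of the repository package now pass only the SQLite file path, as the test updates below show. A minimal sketch of the new call sequence, assuming the repository package is imported and reusing the "./var/job.db" default from ProgramConfig above:

    // Sketch only: the driver argument is gone and SQLite is implied.
    repository.Connect("./var/job.db")
    jobRepo := repository.GetJobRepository()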
@@ -26,7 +26,7 @@ func nodeTestSetup(t *testing.T) {
 "main": {
 "addr": "0.0.0.0:8080",
 "validate": false,
-"apiAllowedIPs": [
+"api-allowed-ips": [
 "*"
 ]
 },
@@ -139,7 +139,7 @@ func nodeTestSetup(t *testing.T) {
 }
 archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)

-Connect("sqlite3", dbfilepath)
+Connect(dbfilepath)

 if err := archive.Init(json.RawMessage(archiveCfg)); err != nil {
 t.Fatal(err)
@@ -151,7 +151,7 @@ func setup(tb testing.TB) *JobRepository {
 dbfile := "testdata/job.db"
 err := MigrateDB(dbfile)
 noErr(tb, err)
-Connect("sqlite3", dbfile)
+Connect(dbfile)
 return GetJobRepository()
 }

@@ -20,7 +20,7 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
 const testconfig = `{
 "main": {
 "addr": "0.0.0.0:8080",
-"apiAllowedIPs": [
+"api-allowed-ips": [
 "*"
 ]
 },
@@ -36,7 +36,7 @@ func setupUserTest(t *testing.T) *UserCfgRepo {
 if err != nil {
 t.Fatal(err)
 }
-Connect("sqlite3", dbfilepath)
+Connect(dbfilepath)

 tmpdir := t.TempDir()
 cfgFilePath := filepath.Join(tmpdir, "config.json")
@@ -19,7 +19,7 @@ func setup(tb testing.TB) *repository.JobRepository {
 dbfile := "../repository/testdata/job.db"
 err := repository.MigrateDB(dbfile)
 noErr(tb, err)
-repository.Connect("sqlite3", dbfile)
+repository.Connect(dbfile)
 return repository.GetJobRepository()
 }
